From 23ff5323c756faf3d8165253c73d148501331393 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Thu, 18 May 2023 14:16:32 +0800 Subject: [PATCH 01/52] [Doc] add mmyolo yolo-pose results (#2374) --- projects/yolox-pose/README.md | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/projects/yolox-pose/README.md b/projects/yolox-pose/README.md index 4dd3aa1c70..264b65fe9f 100644 --- a/projects/yolox-pose/README.md +++ b/projects/yolox-pose/README.md @@ -98,9 +98,20 @@ Results on COCO val2017 We have only trained models with an input size of 640, as we couldn't replicate the performance enhancement mentioned in the paper when increasing the input size from 640 to 960. We warmly welcome any contributions if you can successfully reproduce the results from the paper! +**NEW!** + +[MMYOLO](https://github.com/open-mmlab/mmyolo/blob/dev/configs/yolox/README.md#yolox-pose) also supports YOLOX-Pose and achieves better performance. Their models are fully compatible with this project. 
Here are their results on COCO val2017: + +| Backbone | Size | Batch Size | AMP | RTMDet-Hyp | Mem (GB) | AP | Config | Download | +| :--------: | :--: | :--------: | :-: | :--------: | :------: | :--: | :------------------------------------------------------------------------: | :---------------------------------------------------------------------------: | +| YOLOX-tiny | 416 | 8xb32 | Yes | Yes | 5.3 | 52.8 | [config](https://github.com/open-mmlab/mmyolo/blob/dev/configs/yolox/pose/yolox-pose_tiny_8xb32-300e-rtmdet-hyp_coco.py) | [model](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_tiny_8xb32-300e-rtmdet-hyp_coco/yolox-pose_tiny_8xb32-300e-rtmdet-hyp_coco_20230427_080351-2117af67.pth) \| [log](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_tiny_8xb32-300e-rtmdet-hyp_coco/yolox-pose_tiny_8xb32-300e-rtmdet-hyp_coco_20230427_080351.log.json) | +| YOLOX-s | 640 | 8xb32 | Yes | Yes | 10.7 | 63.7 | [config](https://github.com/open-mmlab/mmyolo/blob/dev/configs/yolox/pose/yolox-pose_s_8xb32-300e-rtmdet-hyp_coco.py) | [model](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_s_8xb32-300e-rtmdet-hyp_coco/yolox-pose_s_8xb32-300e-rtmdet-hyp_coco_20230427_005150-e87d843a.pth) \| [log](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_s_8xb32-300e-rtmdet-hyp_coco/yolox-pose_s_8xb32-300e-rtmdet-hyp_coco_20230427_005150.log.json) | +| YOLOX-m | 640 | 8xb32 | Yes | Yes | 19.2 | 69.3 | [config](https://github.com/open-mmlab/mmyolo/blob/dev/configs/yolox/pose/yolox-pose_m_8xb32-300e-rtmdet-hyp_coco.py) | [model](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_m_8xb32-300e-rtmdet-hyp_coco/yolox-pose_m_8xb32-300e-rtmdet-hyp_coco_20230427_094024-bbeacc1c.pth) \| [log](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_m_8xb32-300e-rtmdet-hyp_coco/yolox-pose_m_8xb32-300e-rtmdet-hyp_coco_20230427_094024.log.json) | +| YOLOX-l | 640 | 8xb32 | Yes | Yes | 30.3 | 71.1 | 
[config](https://github.com/open-mmlab/mmyolo/blob/dev/configs/yolox/pose/yolox-pose_l_8xb32-300e-rtmdet-hyp_coco.py) | [model](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_l_8xb32-300e-rtmdet-hyp_coco/yolox-pose_l_8xb32-300e-rtmdet-hyp_coco_20230427_041140-82d65ac8.pth) \| [log](https://download.openmmlab.com/mmyolo/v0/yolox/pose/yolox-pose_l_8xb32-300e-rtmdet-hyp_coco/yolox-pose_l_8xb32-300e-rtmdet-hyp_coco_20230427_041140.log.json) | + ## Citation -If this project benefits your work, please kindly consider citing the original paper: +If this project benefits your work, please kindly consider citing the original papers: ```bibtex @inproceedings{maji2022yolo, @@ -112,6 +123,15 @@ If this project benefits your work, please kindly consider citing the original p } ``` +```bibtex +@article{yolox2021, + title={{YOLOX}: Exceeding YOLO Series in 2021}, + author={Ge, Zheng and Liu, Songtao and Wang, Feng and Li, Zeming and Sun, Jian}, + journal={arXiv preprint arXiv:2107.08430}, + year={2021} +} +``` + Additionally, please cite our work as well: ```bibtex From a83e7dee79848925d90f885e6d06b036ac7bfaf2 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Thu, 18 May 2023 19:33:03 +0800 Subject: [PATCH 02/52] [Fix] fix YOLOX-Pose Inferencer with webcam input (#2378) --- mmpose/apis/inferencers/base_mmpose_inferencer.py | 3 --- projects/yolox-pose/configs/_base_/default_runtime.py | 2 +- projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py | 2 +- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index 86e61463b6..0f966e9b0f 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -159,9 +159,6 @@ def _get_webcam_inputs(self, inputs: str) -> Generator: Raises: ValueError: If the inputs string is not in the expected format. 
""" - assert getattr(self.visualizer, 'backend', None) == 'opencv', \ - 'Visualizer must utilize the OpenCV backend in order to ' \ - 'support webcam inputs.' # Ensure the inputs string is in the expected format. inputs = inputs.lower() diff --git a/projects/yolox-pose/configs/_base_/default_runtime.py b/projects/yolox-pose/configs/_base_/default_runtime.py index 1f12ce3564..7057585015 100644 --- a/projects/yolox-pose/configs/_base_/default_runtime.py +++ b/projects/yolox-pose/configs/_base_/default_runtime.py @@ -33,7 +33,7 @@ resume = False # file I/O backend -file_client_args = dict(backend='disk') +backend_args = dict(backend='local') # training/validation/testing progress train_cfg = dict() diff --git a/projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py b/projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py index f0cda72544..1854e51e1d 100644 --- a/projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py +++ b/projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py @@ -92,7 +92,7 @@ # pipelines pre_transform = [ - dict(type='LoadImageFromFile', file_client_args=_base_.file_client_args), + dict(type='mmpose.LoadImage', backend_args=_base_.backend_args), dict(type='PoseToDetConverter') ] From 65ef573b6ff39505729bc47824df83395600f92a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=B0=B8=E9=9F=AC?= <53283758+Dominic23331@users.noreply.github.com> Date: Sat, 20 May 2023 15:53:56 +0800 Subject: [PATCH 03/52] [Feature] Support AnimalKingdom dataset (#2139) --- configs/_base_/datasets/ak.py | 267 ++++++++ .../topdown_heatmap/README.md | 14 + .../topdown_heatmap/ak/hrnet_animalkingdom.md | 47 ++ .../ak/hrnet_animalkingdom.yml | 86 +++ ...w32_8xb32-300e_animalkingdom_P1-256x256.py | 146 +++++ ...w32_8xb32-300e_animalkingdom_P2-256x256.py | 146 +++++ ...300e_animalkingdom_P3_amphibian-256x256.py | 146 +++++ ...xb32-300e_animalkingdom_P3_bird-256x256.py | 146 +++++ ...xb32-300e_animalkingdom_P3_fish-256x256.py | 146 +++++ 
...32-300e_animalkingdom_P3_mammal-256x256.py | 146 +++++ ...2-300e_animalkingdom_P3_reptile-256x256.py | 146 +++++ docs/en/dataset_zoo/2d_animal_keypoint.md | 74 ++- docs/src/papers/datasets/animalkingdom.md | 20 + docs/zh_cn/dataset_zoo/2d_animal_keypoint.md | 66 ++ mmpose/datasets/datasets/animal/__init__.py | 4 +- .../datasets/animal/animalkingdom_dataset.py | 86 +++ tests/data/ak/AAOYRUDX/AAOYRUDX_f000027.jpg | Bin 0 -> 81846 bytes tests/data/ak/AAOYRUDX/AAOYRUDX_f000028.jpg | Bin 0 -> 81678 bytes tests/data/ak/test_animalkingdom.json | 589 ++++++++++++++++++ .../test_animalkingdom_dataset.py | 146 +++++ 20 files changed, 2411 insertions(+), 10 deletions(-) create mode 100644 configs/_base_/datasets/ak.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.md create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.yml create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py create mode 100644 configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py create mode 100644 docs/src/papers/datasets/animalkingdom.md create mode 100644 mmpose/datasets/datasets/animal/animalkingdom_dataset.py create mode 100644 tests/data/ak/AAOYRUDX/AAOYRUDX_f000027.jpg create mode 100644 
tests/data/ak/AAOYRUDX/AAOYRUDX_f000028.jpg create mode 100644 tests/data/ak/test_animalkingdom.json create mode 100644 tests/test_datasets/test_datasets/test_animal_datasets/test_animalkingdom_dataset.py diff --git a/configs/_base_/datasets/ak.py b/configs/_base_/datasets/ak.py new file mode 100644 index 0000000000..e8b12f5a31 --- /dev/null +++ b/configs/_base_/datasets/ak.py @@ -0,0 +1,267 @@ +dataset_info = dict( + dataset_name='Animal Kingdom', + paper_info=dict( + author='Singapore University of Technology and Design, Singapore.' + ' Xun Long Ng, Kian Eng Ong, Qichen Zheng,' + ' Yun Ni, Si Yong Yeo, Jun Liu.', + title='Animal Kingdom: ' + 'A Large and Diverse Dataset for Animal Behavior Understanding', + container='Conference on Computer Vision ' + 'and Pattern Recognition (CVPR)', + year='2022', + homepage='https://sutdcv.github.io/Animal-Kingdom', + version='1.0 (2022-06)', + date_created='2022-06', + ), + keypoint_info={ + 0: + dict( + name='Head_Mid_Top', + id=0, + color=(225, 0, 255), + type='upper', + swap=''), + 1: + dict( + name='Eye_Left', + id=1, + color=[220, 20, 60], + type='upper', + swap='Eye_Right'), + 2: + dict( + name='Eye_Right', + id=2, + color=[0, 255, 255], + type='upper', + swap='Eye_Left'), + 3: + dict( + name='Mouth_Front_Top', + id=3, + color=(0, 255, 42), + type='upper', + swap=''), + 4: + dict( + name='Mouth_Back_Left', + id=4, + color=[221, 160, 221], + type='upper', + swap='Mouth_Back_Right'), + 5: + dict( + name='Mouth_Back_Right', + id=5, + color=[135, 206, 250], + type='upper', + swap='Mouth_Back_Left'), + 6: + dict( + name='Mouth_Front_Bottom', + id=6, + color=[50, 205, 50], + type='upper', + swap=''), + 7: + dict( + name='Shoulder_Left', + id=7, + color=[255, 182, 193], + type='upper', + swap='Shoulder_Right'), + 8: + dict( + name='Shoulder_Right', + id=8, + color=[0, 191, 255], + type='upper', + swap='Shoulder_Left'), + 9: + dict( + name='Elbow_Left', + id=9, + color=[255, 105, 180], + type='upper', + swap='Elbow_Right'), + 
10: + dict( + name='Elbow_Right', + id=10, + color=[30, 144, 255], + type='upper', + swap='Elbow_Left'), + 11: + dict( + name='Wrist_Left', + id=11, + color=[255, 20, 147], + type='upper', + swap='Wrist_Right'), + 12: + dict( + name='Wrist_Right', + id=12, + color=[0, 0, 255], + type='upper', + swap='Wrist_Left'), + 13: + dict( + name='Torso_Mid_Back', + id=13, + color=(185, 3, 221), + type='upper', + swap=''), + 14: + dict( + name='Hip_Left', + id=14, + color=[255, 215, 0], + type='lower', + swap='Hip_Right'), + 15: + dict( + name='Hip_Right', + id=15, + color=[147, 112, 219], + type='lower', + swap='Hip_Left'), + 16: + dict( + name='Knee_Left', + id=16, + color=[255, 165, 0], + type='lower', + swap='Knee_Right'), + 17: + dict( + name='Knee_Right', + id=17, + color=[138, 43, 226], + type='lower', + swap='Knee_Left'), + 18: + dict( + name='Ankle_Left', + id=18, + color=[255, 140, 0], + type='lower', + swap='Ankle_Right'), + 19: + dict( + name='Ankle_Right', + id=19, + color=[128, 0, 128], + type='lower', + swap='Ankle_Left'), + 20: + dict( + name='Tail_Top_Back', + id=20, + color=(0, 251, 255), + type='lower', + swap=''), + 21: + dict( + name='Tail_Mid_Back', + id=21, + color=[32, 178, 170], + type='lower', + swap=''), + 22: + dict( + name='Tail_End_Back', + id=22, + color=(0, 102, 102), + type='lower', + swap='') + }, + skeleton_info={ + 0: + dict(link=('Eye_Left', 'Head_Mid_Top'), id=0, color=[220, 20, 60]), + 1: + dict(link=('Eye_Right', 'Head_Mid_Top'), id=1, color=[0, 255, 255]), + 2: + dict( + link=('Mouth_Front_Top', 'Mouth_Back_Left'), + id=2, + color=[221, 160, 221]), + 3: + dict( + link=('Mouth_Front_Top', 'Mouth_Back_Right'), + id=3, + color=[135, 206, 250]), + 4: + dict( + link=('Mouth_Front_Bottom', 'Mouth_Back_Left'), + id=4, + color=[221, 160, 221]), + 5: + dict( + link=('Mouth_Front_Bottom', 'Mouth_Back_Right'), + id=5, + color=[135, 206, 250]), + 6: + dict( + link=('Head_Mid_Top', 'Torso_Mid_Back'), id=6, + color=(225, 0, 255)), + 7: + dict( + 
link=('Torso_Mid_Back', 'Tail_Top_Back'), + id=7, + color=(185, 3, 221)), + 8: + dict( + link=('Tail_Top_Back', 'Tail_Mid_Back'), id=8, + color=(0, 251, 255)), + 9: + dict( + link=('Tail_Mid_Back', 'Tail_End_Back'), + id=9, + color=[32, 178, 170]), + 10: + dict( + link=('Head_Mid_Top', 'Shoulder_Left'), + id=10, + color=[255, 182, 193]), + 11: + dict( + link=('Head_Mid_Top', 'Shoulder_Right'), + id=11, + color=[0, 191, 255]), + 12: + dict( + link=('Shoulder_Left', 'Elbow_Left'), id=12, color=[255, 105, + 180]), + 13: + dict( + link=('Shoulder_Right', 'Elbow_Right'), + id=13, + color=[30, 144, 255]), + 14: + dict(link=('Elbow_Left', 'Wrist_Left'), id=14, color=[255, 20, 147]), + 15: + dict(link=('Elbow_Right', 'Wrist_Right'), id=15, color=[0, 0, 255]), + 16: + dict(link=('Tail_Top_Back', 'Hip_Left'), id=16, color=[255, 215, 0]), + 17: + dict( + link=('Tail_Top_Back', 'Hip_Right'), id=17, color=[147, 112, 219]), + 18: + dict(link=('Hip_Left', 'Knee_Left'), id=18, color=[255, 165, 0]), + 19: + dict(link=('Hip_Right', 'Knee_Right'), id=19, color=[138, 43, 226]), + 20: + dict(link=('Knee_Left', 'Ankle_Left'), id=20, color=[255, 140, 0]), + 21: + dict(link=('Knee_Right', 'Ankle_Right'), id=21, color=[128, 0, 128]) + }, + joint_weights=[ + 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., + 1., 1., 1., 1., 1. 
+ ], + sigmas=[ + 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, + 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, 0.025, + 0.025, 0.025, 0.025 + ]) diff --git a/configs/animal_2d_keypoint/topdown_heatmap/README.md b/configs/animal_2d_keypoint/topdown_heatmap/README.md index b4f8e366ff..e799273b8b 100644 --- a/configs/animal_2d_keypoint/topdown_heatmap/README.md +++ b/configs/animal_2d_keypoint/topdown_heatmap/README.md @@ -52,3 +52,17 @@ Results on Grévy’s Zebra test set | ResNet-152 | 160x160 | 0.921 | 1.67 | [resnet_zebra.md](./zebra/resnet_zebra.md) | | ResNet-101 | 160x160 | 0.915 | 1.83 | [resnet_zebra.md](./zebra/resnet_zebra.md) | | ResNet-50 | 160x160 | 0.914 | 1.87 | [resnet_zebra.md](./zebra/resnet_zebra.md) | + +### Animal-Kingdom Dataset + +Results on AnimalKingdom test set + +| Model | Input Size | class | PCK(0.05) | Details and Download | +| :-------: | :--------: | :-----------: | :-------: | :---------------------------------------------------: | +| HRNet-w32 | 256x256 | P1 | 0.6272 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P2 | 0.3774 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_mammals | 0.5756 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_amphibians | 0.5356 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_reptiles | 0.5 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_birds | 0.7679 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_fishes | 0.636 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.md b/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.md new file mode 100644 index 0000000000..f32fb49d90 --- /dev/null +++ 
b/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.md @@ -0,0 +1,47 @@ + + +
+HRNet (CVPR'2019) + +```bibtex +@inproceedings{sun2019deep, + title={Deep high-resolution representation learning for human pose estimation}, + author={Sun, Ke and Xiao, Bin and Liu, Dong and Wang, Jingdong}, + booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, + pages={5693--5703}, + year={2019} +} +``` + +
+ + + +
+AnimalKingdom (CVPR'2022) + +```bibtex +@InProceedings{ + Ng_2022_CVPR, + author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, + title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2022}, + pages = {19023-19034} + } +``` + +
+ +Results on AnimalKingdom validation set + +| Arch | Input Size | PCK(0.05) | Official Repo | Paper | ckpt | log | +| ------------------------------------------------------ | ---------- | --------- | ------------- | ------ | ------------------------------------------------------ | ------------------------------------------------------ | +| [P1_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py) | 256x256 | 0.6323 | 0.6342 | 0.6606 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256-08bf96cb_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256-08bf96cb_20230519.json) | +| [P2_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py) | 256x256 | 0.3741 | 0.3726 | 0.393 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256-2396cc58_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256-2396cc58_20230519.json) | +| [P3_mammals_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py) | 256x256 | 0.571 | 0.5719 | 0.6159 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256-e8aadf02_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256-e8aadf02_20230519.json) | +| 
[P3_amphibians_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py) | 256x256 | 0.5358 | 0.5432 | 0.5674 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256-845085f9_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256-845085f9_20230519.json) | +| [P3_reptiles_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py) | 256x256 | 0.51 | 0.5 | 0.5606 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256-e8440c16_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256-e8440c16_20230519.json) | +| [P3_birds_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py) | 256x256 | 0.7671 | 0.7636 | 0.7735 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256-566feff5_20230519.pth) | [log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256-566feff5_20230519.json) | +| [P3_fishes_hrnet_w32](configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py) | 256x256 | 0.6406 | 0.636 | 0.6825 | [ckpt](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256-76c3999f_20230519.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256-76c3999f_20230519.json) | diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.yml b/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.yml new file mode 100644 index 0000000000..12f208a10b --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/hrnet_animalkingdom.yml @@ -0,0 +1,86 @@ +Models: +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py + In Collection: HRNet + Metadata: + Architecture: &id001 + - HRNet + Training Data: AnimalKingdom_P1 + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.6323 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256-08bf96cb_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: AnimalKingdom_P2 + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.3741 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256-2396cc58_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: AnimalKingdom_P3_amphibian + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.5358 + Task: Animal 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256-845085f9_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: AnimalKingdom_P3_bird + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.7671 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256-566feff5_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: AnimalKingdom_P3_fish + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.6406 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256-76c3999f_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: AnimalKingdom_P3_mammal + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.571 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256-e8aadf02_20230519.pth +- Config: configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py + In Collection: HRNet + Metadata: + 
Architecture: *id001 + Training Data: AnimalKingdom_P3_reptile + Name: td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256 + Results: + - Dataset: AnimalKingdom + Metrics: + PCK: 0.51 + Task: Animal 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/animal_2d_keypoint/topdown_heatmap/animal_kingdom/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256-e8440c16_20230519.pth diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py new file mode 100644 index 0000000000..0e7eb0136e --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P1-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + 
block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P1/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + 
ann_file='annotations/ak_P1/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py new file mode 100644 index 0000000000..f42057f8aa --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P2-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + 
block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P2/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P2/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader 
= val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py new file mode 100644 index 0000000000..5a83e7a97b --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_amphibian-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + 
block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_amphibian/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_amphibian/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), 
dict(type='AUC')] +test_evaluator = val_evaluator diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py new file mode 100644 index 0000000000..ca3c91af61 --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_bird-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + 
type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_bird/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_bird/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git 
a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py new file mode 100644 index 0000000000..3923f30d10 --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_fish-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + 
checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_fish/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_fish/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git 
a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py new file mode 100644 index 0000000000..d061c4b6fb --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_mammal-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + 
checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_mammal/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_mammal/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git 
a/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py new file mode 100644 index 0000000000..b06a49936b --- /dev/null +++ b/configs/animal_2d_keypoint/topdown_heatmap/ak/td-hm_hrnet-w32_8xb32-300e_animalkingdom_P3_reptile-256x256.py @@ -0,0 +1,146 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=300, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='AdamW', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='PCK', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + 
checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=23, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'AnimalKingdomDataset' +data_mode = 'topdown' +data_root = 'data/ak/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_reptile/train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=24, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/ak_P3_reptile/test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [dict(type='PCKAccuracy', thr=0.05), dict(type='AUC')] +test_evaluator = val_evaluator diff --git a/docs/en/dataset_zoo/2d_animal_keypoint.md 
b/docs/en/dataset_zoo/2d_animal_keypoint.md index 86b1b70632..11087c2766 100644 --- a/docs/en/dataset_zoo/2d_animal_keypoint.md +++ b/docs/en/dataset_zoo/2d_animal_keypoint.md @@ -13,6 +13,7 @@ MMPose supported datasets: - [Desert Locust](#desert-locust) \[ [Homepage](https://github.com/jgraving/DeepPoseKit-Data) \] - [Grévy’s Zebra](#grvys-zebra) \[ [Homepage](https://github.com/jgraving/DeepPoseKit-Data) \] - [ATRW](#atrw) \[ [Homepage](https://cvwc2019.github.io/challenge.html) \] +- [Animal Kingdom](#Animal-Kindom) \[ [Homepage](https://openaccess.thecvf.com/content/CVPR2022/html/Ng_Animal_Kingdom_A_Large_and_Diverse_Dataset_for_Animal_Behavior_CVPR_2022_paper.html) \] ## Animal-Pose @@ -94,7 +95,6 @@ mmpose │ │-- dog │ │-- horse │ │-- sheep - ``` The official dataset does not provide the official train/val/test set split. @@ -154,7 +154,6 @@ mmpose │ │-- 000000000001.jpg │ │-- 000000000002.jpg │ │-- ... - ``` The annotation files in 'annotation' folder contains 50 labeled animal species. There are total 10,015 labeled images with 13,028 instances in the AP-10K dataset. We randonly split them into train, val, and test set following the ratio of 7:1:2. @@ -206,7 +205,6 @@ mmpose │ │-- BrownHorseinShadow │ │-- BrownHorseintoshadow │ │-- ... - ``` ## MacaquePose @@ -255,7 +253,6 @@ mmpose │ │-- 020a1c75c8c85238.jpg │ │-- 020b1506eef2557d.jpg │ │-- ... - ``` Since the official dataset does not provide the test set, we randomly select 12500 images for training, and the rest for evaluation (see [code](/tools/dataset/parse_macaquepose_dataset.py)). @@ -308,7 +305,6 @@ mmpose │ │-- 2.jpg │ │-- 3.jpg │ │-- ... - ``` Since the official dataset does not provide the test set, we randomly select 90% images for training, and the rest (10%) for evaluation (see [code](/tools/dataset_converters/parse_deepposekit_dataset.py)). @@ -360,7 +356,6 @@ mmpose │ │-- 2.jpg │ │-- 3.jpg │ │-- ... 
- ``` Since the official dataset does not provide the test set, we randomly select 90% images for training, and the rest (10%) for evaluation (see [code](/tools/dataset_converters/parse_deepposekit_dataset.py)). @@ -389,7 +384,6 @@ Since the official dataset does not provide the test set, we randomly select 90%
- For [Grévy’s Zebra](https://github.com/jgraving/DeepPoseKit-Data) dataset, images can be downloaded from [zebra_images](https://download.openmmlab.com/mmpose/datasets/zebra_images.tar). Please download the annotation files from [zebra_annotations](https://download.openmmlab.com/mmpose/datasets/zebra_annotations.tar). Extract them under {MMPose}/data, and make them look like this: @@ -412,7 +406,6 @@ mmpose │ │-- 2.jpg │ │-- 3.jpg │ │-- ... - ``` Since the official dataset does not provide the test set, we randomly select 90% images for training, and the rest (10%) for evaluation (see [code](/tools/dataset_converters/parse_deepposekit_dataset.py)). @@ -439,7 +432,6 @@ Since the official dataset does not provide the test set, we randomly select 90%
- ATRW captures images of the Amur tiger (also known as Siberian tiger, Northeast-China tiger) in the wild. For [ATRW](https://cvwc2019.github.io/challenge.html) dataset, please download images from [Pose_train](https://lilablobssc.blob.core.windows.net/cvwc2019/train/atrw_pose_train.tar.gz), @@ -476,5 +468,69 @@ mmpose │ │ │-- 000000.jpg │ │ │-- 000004.jpg │ │ │-- ... +``` + +## Animal Kingdom + +
+Animal Kingdom (CVPR'2022) +
+
+ +
+ +```bibtex +@InProceedings{ + Ng_2022_CVPR, + author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, + title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2022}, + pages = {19023-19034} + } +``` +For [Animal Kingdom](https://github.com/sutdcv/Animal-Kingdom) dataset, images can be downloaded from [here](https://forms.office.com/pages/responsepage.aspx?id=drd2NJDpck-5UGJImDFiPVRYpnTEMixKqPJ1FxwK6VZUQkNTSkRISTNORUI2TDBWMUpZTlQ5WUlaSyQlQCN0PWcu). +Please Extract dataset under {MMPose}/data, and make them look like this: + +```text +mmpose +├── mmpose +├── docs +├── tests +├── tools +├── configs +`── data + │── ak + |--annotations + │ │-- ak_P1 + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P2 + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_amphibian + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_bird + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_fish + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_mammal + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_reptile + │ │-- train.json + │ │-- test.json + │-- images + │ │-- AAACXZTV + │ │ │--AAACXZTV_f000059.jpg + │ │ │--... + │ │-- AAAUILHH + │ │ │--AAAUILHH_f000098.jpg + │ │ │--... + │ │-- ... ``` diff --git a/docs/src/papers/datasets/animalkingdom.md b/docs/src/papers/datasets/animalkingdom.md new file mode 100644 index 0000000000..3aa8592331 --- /dev/null +++ b/docs/src/papers/datasets/animalkingdom.md @@ -0,0 +1,20 @@ +# Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding + + + +
+Animal Kingdom (CVPR'2022) + +```bibtex +@InProceedings{ + Ng_2022_CVPR, + author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, + title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2022}, + pages = {19023-19034} + } +``` + +
diff --git a/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md b/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md index 2429602537..21106100db 100644 --- a/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md +++ b/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md @@ -13,6 +13,7 @@ MMPose supported datasets: - [Desert Locust](#desert-locust) \[ [Homepage](https://github.com/jgraving/DeepPoseKit-Data) \] - [Grévy’s Zebra](#grvys-zebra) \[ [Homepage](https://github.com/jgraving/DeepPoseKit-Data) \] - [ATRW](#atrw) \[ [Homepage](https://cvwc2019.github.io/challenge.html) \] +- [Animal Kingdom](#Animal-Kindom) \[ [Homepage](https://openaccess.thecvf.com/content/CVPR2022/html/Ng_Animal_Kingdom_A_Large_and_Diverse_Dataset_for_Animal_Behavior_CVPR_2022_paper.html) \] ## Animal-Pose @@ -478,3 +479,68 @@ mmpose │ │ │-- ... ``` + +## Animal Kingdom + +
+Animal Kingdom (CVPR'2022) +
+
+ +
+ +```bibtex +@InProceedings{ + Ng_2022_CVPR, + author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, + title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2022}, + pages = {19023-19034} + } +``` + +For [Animal Kingdom](https://github.com/sutdcv/Animal-Kingdom) dataset, images can be downloaded from [here](https://forms.office.com/pages/responsepage.aspx?id=drd2NJDpck-5UGJImDFiPVRYpnTEMixKqPJ1FxwK6VZUQkNTSkRISTNORUI2TDBWMUpZTlQ5WUlaSyQlQCN0PWcu). +Please Extract dataset under {MMPose}/data, and make them look like this: + +```text +mmpose +├── mmpose +├── docs +├── tests +├── tools +├── configs +`── data + │── ak + |--annotations + │ │-- ak_P1 + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P2 + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_amphibian + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_bird + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_fish + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_mammal + │ │ │-- train.json + │ │ │-- test.json + │ │-- ak_P3_reptile + │ │-- train.json + │ │-- test.json + │-- images + │ │-- AAACXZTV + │ │ │--AAACXZTV_f000059.jpg + │ │ │--... + │ │-- AAAUILHH + │ │ │--AAAUILHH_f000098.jpg + │ │ │--... + │ │-- ... +``` diff --git a/mmpose/datasets/datasets/animal/__init__.py b/mmpose/datasets/datasets/animal/__init__.py index dfe9b5938c..669f08cddd 100644 --- a/mmpose/datasets/datasets/animal/__init__.py +++ b/mmpose/datasets/datasets/animal/__init__.py @@ -1,4 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. 
+from .animalkingdom_dataset import AnimalKingdomDataset from .animalpose_dataset import AnimalPoseDataset from .ap10k_dataset import AP10KDataset from .atrw_dataset import ATRWDataset @@ -10,5 +11,6 @@ __all__ = [ 'AnimalPoseDataset', 'AP10KDataset', 'Horse10Dataset', 'MacaqueDataset', - 'FlyDataset', 'LocustDataset', 'ZebraDataset', 'ATRWDataset' + 'FlyDataset', 'LocustDataset', 'ZebraDataset', 'ATRWDataset', + 'AnimalKingdomDataset' ] diff --git a/mmpose/datasets/datasets/animal/animalkingdom_dataset.py b/mmpose/datasets/datasets/animal/animalkingdom_dataset.py new file mode 100644 index 0000000000..35ccb8b67a --- /dev/null +++ b/mmpose/datasets/datasets/animal/animalkingdom_dataset.py @@ -0,0 +1,86 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmpose.registry import DATASETS +from ..base import BaseCocoStyleDataset + + +@DATASETS.register_module() +class AnimalKingdomDataset(BaseCocoStyleDataset): + """Animal Kingdom dataset for animal pose estimation. + + "[CVPR2022] Animal Kingdom: + A Large and Diverse Dataset for Animal Behavior Understanding" + More details can be found in the `paper + `__ . + + Website: + + The dataset loads raw features and apply specified transforms + to return a dict containing the image tensors and other information. + + Animal Kingdom keypoint indexes:: + + 0: 'Head_Mid_Top', + 1: 'Eye_Left', + 2: 'Eye_Right', + 3: 'Mouth_Front_Top', + 4: 'Mouth_Back_Left', + 5: 'Mouth_Back_Right', + 6: 'Mouth_Front_Bottom', + 7: 'Shoulder_Left', + 8: 'Shoulder_Right', + 9: 'Elbow_Left', + 10: 'Elbow_Right', + 11: 'Wrist_Left', + 12: 'Wrist_Right', + 13: 'Torso_Mid_Back', + 14: 'Hip_Left', + 15: 'Hip_Right', + 16: 'Knee_Left', + 17: 'Knee_Right', + 18: 'Ankle_Left ', + 19: 'Ankle_Right', + 20: 'Tail_Top_Back', + 21: 'Tail_Mid_Back', + 22: 'Tail_End_Back + + Args: + ann_file (str): Annotation file path. Default: ''. + bbox_file (str, optional): Detection result file path. 
If + ``bbox_file`` is set, detected bboxes loaded from this file will + be used instead of ground-truth bboxes. This setting is only for + evaluation, i.e., ignored when ``test_mode`` is ``False``. + Default: ``None``. + data_mode (str): Specifies the mode of data samples: ``'topdown'`` or + ``'bottomup'``. In ``'topdown'`` mode, each data sample contains + one instance; while in ``'bottomup'`` mode, each data sample + contains all instances in a image. Default: ``'topdown'`` + metainfo (dict, optional): Meta information for dataset, such as class + information. Default: ``None``. + data_root (str, optional): The root directory for ``data_prefix`` and + ``ann_file``. Default: ``None``. + data_prefix (dict, optional): Prefix for training data. Default: + ``dict(img=None, ann=None)``. + filter_cfg (dict, optional): Config for filter data. Default: `None`. + indices (int or Sequence[int], optional): Support using first few + data in annotation file to facilitate training/testing on a smaller + dataset. Default: ``None`` which means using all ``data_infos``. + serialize_data (bool, optional): Whether to hold memory using + serialized objects, when enabled, data loader workers can use + shared RAM from master process instead of making a copy. + Default: ``True``. + pipeline (list, optional): Processing pipeline. Default: []. + test_mode (bool, optional): ``test_mode=True`` means in test phase. + Default: ``False``. + lazy_init (bool, optional): Whether to load annotation during + instantiation. In some cases, such as visualization, only the meta + information of the dataset is needed, which is not necessary to + load annotation file. ``Basedataset`` can skip load annotations to + save time by set ``lazy_init=False``. Default: ``False``. + max_refetch (int, optional): If ``Basedataset.prepare_data`` get a + None img. The maximum extra number of cycles to get a valid + image. Default: 1000. 
+ """ + + METAINFO: dict = dict(from_file='configs/_base_/datasets/ak.py') diff --git a/tests/data/ak/AAOYRUDX/AAOYRUDX_f000027.jpg b/tests/data/ak/AAOYRUDX/AAOYRUDX_f000027.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0698935984232d4c23e1f212a043457e1dc8179b GIT binary patch literal 81846 zcmbTdWmFtZ+&wtBLx5mG6D&Y*w;_b!5CSB)Lx5p$9UOvtfB*>^La^ZOE(0OB4?2VU z3_3yoJnwtX?w9?ryVa-abf2#7{@t$buBv-)KP)_~1D>cUsVV_5FaQ9I#|7}P1W*8A zV*Ho>D>45|*jWFSIM~=&*tj^jxc`&!2=H<72=H)m@rm#W2>(luYs5r^#Q$ylcar}- z6cYyv3x^O77w>|A>qH5f|ovZGiV5 zE(}bc$Badejq_X(mqK0}&(e+Zg-|H|(>KZaH9Z8Z!a7JQEB8r4YBmu#JL*4Z|AXxR z9k9>;zmWZ3!2WMsFaR+Y#$)4Qkptuaw|!`R6mwrLE^;UKNR*L-C`UnWGHBq}i0HS} z!?7tYU-==}n|Q2ov$TP9`qw(%p@V)iEaKTJcvw|bFLjLBGXGK0Gp+CH@_v?{djNz5 z@Km9vjT?}~5uV05%I|0RD2RCICE`Y8;$5jexT}iBJ=^$ktzUBgH%RIM5TTBuo=40W zIoO(Nc+*4^v z>9-gtec$<)!QJotFU`-wi*D$|Op!evsS&VCr{cUpb+ks&1wLVd?3i_SO#gVIAi__Q zxuS6<`pa41uq~0e5_le!nk#5#4YI3%to1)xdW$g5Dt*(Q63^|u+)s+v;fi2x+x9E7 z&dSAeH*|NE$S2rgH>o*FSDLbyd zc~PB~Z$V4F^u;aSF8&%dmn9d~;PgVnWU1c2E@31ju@WN^mSAF=Ghzs*8p0NDgY+11sp>Vje+Kz^IRX`n!`3i6FrHbr8X>8{NEc>S z9adQ}Afz=~3r+sBu4%#b)2T`IGDgrD~;L*Z8cO#=! zbJ|RQe=BM}(pjQuXSy7IVzK;(ck1pkm4@ibx4nEw09`v}QDHL~LR8MjvGh62c} zNT7J1kvpt0fjAwxJdnp{@cpV3=DRBV*^hyg$9R__sjWcRa}D!W1z8+bDdStXXk-Ga zQK!lA5`wa<=X_WG*O&@r3`&EJ`<9rjENi;nnEC##{f%r(^dnsXmrmBZM0e48eLGJO zBI9Z9|G%*WqPQGkxxca@(H0><5dT;=GcdlBgypeZEiRcQ? 
zWUPv&8Mu^+H_DZ(V>O^-%ad8($=?WFO>{3-(vtyepmt(JM)h`dk>LryU@LUi1_m)~ zD$92({Q>>gGe&X|qFv-Gg+*zOwwYEc!%&IgVNuiir6f|bmzD!5r0MCj#CAe{T5|M1RqWD@uyN7( z-FpCy>%vnCb++a#M2_w_LY>CVNM2{3q%JeJhvEO>hO2>Qaq9f%wtDs z>g&s0uFpcg90Fhjl&1H_ZQIhs1ke@PHHJ3~W4)zCSfUn3XTAQ|_54b#P<>D@0%)9II>oJ&m_2DedhS9OQ7L zJ9Z#Shg`&bvi=;4ndz>AQmc&m0=jx%UmH_o*&Ox`}Z#cVi2LSYk=Oy*ovqbbYt zsGSM4ElBgH(w8Crqa+d3t;4v(#M@@v3WQ7KW^6`>H<~afpelpum#5Z8zok2_JK)(# zGNn`{QX^o=pm)NW_LU33=aH)Yg1X;tCg0_>iHL{lZoB8S%*Q;yZx%vj!`9}65hyiX zoWfOfW|d?`MXPYWAGl8+=^Qgv?bv(_lzdC5!jPKl@U>PsFfhs;z;HyxJFV6o;_vl4 z-5%AZ@yqTw{$`T#SiGhY_H{DlbckBEnGrppT?-CL>}O12qVtap(@9r(VeId{?7bfE zRi-yui{bo~M=2zQK19o<0?b*OHA}$oc6+H_H8D-JWz^7%(Rw$u=u zS7L8uR9~%HV#8zh%!+TMAUu7ru~PpkbmppC(cXIkCY6mLp>*0!Pnb~9FFm332;DG8 zIk-6}(=;~^=daGnxNMnNJ_DyqvXDDw-s^%qzq33_hbQ}2?=E{4L*-_a1xGOUHT~s| z2!6vrW|Y)Ol?1*z?TnI8P89+Jx@fb%& z9r4Lcmc{Mvzv)6id2CHsPr=$tAnoYfW7@P-EDsxrafhnF06u|k*u@F#XVql2 z{Fz{%`$dvy?;n~Un|>eLvYZZ0)8d5UsZZW#xzS_cOGp1}`LzHv_qA zL1;{|gt$5jp*ugT)(Sk$&brUWvx@qmiQC_m0IaF8GQ2cVcvW79*E8R*@cy_UXK}6 z@l~2`fM`n-U%7|-`Okl(anjhgcc;ln(PBRNmC&zUt@Lgrx2SB%Ks^{E_9-pCu}3Ec z;A0(b^n41`MN-|R&N;b~{=4ynb%VH4lX00YaiSh&A2+~bytvShJZ;QOH>WA|Z=x0b zH@Ow8s;Sltq*7@S%mkf#3|Vh#Y>Hq06HjkF^eeh&*O*a}Rk>KPEU@~mlVg0W6KqR` z1>=l??8K9*4iQWx{{X#6qrgtHt~lN}Le%mcY4siZ_1&Eh_)h~o;5956A>Ybfy^xt9_YcZDdBvMk7^3wFa4Zb8DvNQ1Az3no!{rTJoYH!o7p5s z(v#(r_l(((O=G&8Th!ry=`kTAUcBFw$vBtFTdoI-_WAC6_9f((nBO3+TVlW8Z1W1X zRig4cWasza0Vb38)cdU!EA2wB{BoO$6+RbfhY?$Bu%$~!pO*qTjP@R{*2x7a=!bp! 
z%DgyfplBEDAq_|Ht+lsc6;KV1_v?L2ut8V+4ltND)PvoaoYpTzZ{pKoeZwNYfXPel zh3W%&UkCVjRX|u(k^h=g@94Gi3}-n=!1h2}yY8Dqtrrc&*abGgbcqK5Cc2>CoIF4d zMc;4aDAeCnXSlFfD%S~qYr^$OhImK1gL@}Fh{$X=m!L!ZN_+1rOrus{&wD+~lhiu{ zz>wkIVx4VUc|UGOI45I>Y%|MKoq1Jd1!T#cf0B^UDe^&wOsCmoW3_eEy3Z)G4gof7 zS9}!~*zTA>^&6EFv#m|v zjYcO1uZk5?|8FAm^@#|@rsVKCFvbEJU>tn5|8%V(U^dDHB3{ztuy_uWEF>X zTo=#ah1rpM>5Benxjqg6)g9>3u$s-DZU@lK|OXU;ip9z}gPUDvW{;m=39cdjbP9pNE# zEH>7aQuEc|*5Kap*dWZ>nc|Kbl;|&4N+-sFA750dCzuETbcu=#p$DUG!VoP4u()Iq zDzwCxWnGf`OWDaP_EhFO7NHaumIUYahK#->igPfC9|`T}C*5Tx6{#S-GM;BYm63-} z-eiDyV{FntMIXgqvAOq!>6It2RVGR?FVw9MR>1wsmb%DB3islz>M6xrOXEoL<}tYGT*c4(iOj$E z5~#=5QW@T+s)HUb8+M^fSEsSBEp=*+(D{S+P7|UZ4P=D$9bVhdT&$SgDBBpm`0(dX zs#Hq;(^D;vBQ>3gLDiD%r2ZrV&^;5%V~?E84T0lh5W`Iy;wn5&^>@YK=vyuSpLUyP z2VP_t2fcmw6xi)FE?CJj9-7uHcX?_1oE*by)WN593=2g z9uG&(R)EM<;O?A!@3-z@&Mqtw*~f{1vB%Us@t)Z;v?eN2j(FLmVJwE}Mmju0aMXR--J>59D!bLDiseIeJ<#lzpO-Z*xX(;Uv?S{ZIeK$iXmRC)CUsUImkW zLoMc~mEdXb!Y6X+Dr&H?IzO|%6aaEYn1~T(3JY9;y!7vTgAdxw%5XUMaWJj3;L^1cvcd0jGh$^IjI|6Llm)!rcAL z@yp&}@92rM;hzEuZ&ZAUmfy~;-HqRE0LrX9v$}bze3i%WN57F@)A1Iq<(XXr+u>LQ zPaCrpI=J^cDlz90ZiDEZyv=C1n<}r1FHcCpp!d6;7e(O>t^tDR?9cXsUZN5c*r!dL zXKY$qXmE9mU!kmlh75qR?y4`Z${zp@{7LSE@+j((3e4z*Hf6fk0k1$ z8jDGrBVytJ@6C;D{SXm9PS~)qWYFJFTSO#F!Cyo#*qD)8rqAX-CaFj@92d*Pvol%a zr#(0CJc&P|-Nlov6yAtB9EH1JEcGDB$mm!TiiR7S+g@R-Ed*|yU_JIBW$qK3Wonvc z&=Bx@(3E-;Xr6vy8(2E#j4&qv=3tf#5`W3c8{Qr=U~-FX)N(`S|B zE?d-v!q8|xGb8%&$YzE=AkNrzYcnl&%Z7K_#qpw27vb``GJvdN(tUqsSPiVhGo9C#6qc_T?-Y{qQcf=AC%UHi$TWEA@BsAlJO%nK%$b_H|jx zTcldwpPt=6{^TEA6#j*dxh+qnP7E9hDgUcB&a2RE^CkKmuZ8!Sd%EOiKdM;HsLZVy zcvA6!%}!*Iq`g+yG7F2bOiQ^dbM?&!XTAu5(Sy}m-Y-#X@$vJ77xwo7KOX?y7{|(|6~)L@#=$|Vg1w}S1_kktfXw8Xh#rsW%-@SbCBqw{P@Xzt z9+GyQ5<}V@hR+U_c~8e*d>4*K;`ucN^uLuHm))1eT&&~}8s+*JZv&9ZEZL1tRb9xH zHZ1lLX2p0y>oS;mE%Rq7)3n4`qCz^#H1tW{dw&nuk39du!e6`oGk^50>lh0_evxfu zOK6uj^}cqkUzD-cg#JP&e5z{Nsn&Uz2?AKmu>w2&y`4mjwpibH?csES*kzRn=#@Q^ z)ONZo_!m=5Ev?Z92o>8zs_%%YnIbI3cvZP+w36&W<6V^(Es*9`rhNw9Y3){N5*VeS 
z7nH}v{}St_YHtF5N`D$U4-l-)A7SHhAwKNrL;f{B`J@fy6CN$PnqM33yWTW8p(H>W+t+G_~C ztu!`1yUGs&7=0@}KFqSCyj$mSm*sKVRXX2GN=htmJ^&y$^Snl*9i-8^HFj_6kPm<; zPcIa8l^52Ux(~hlWpayEG=sDhsqu_=rO!ZXW2#$;se&bWn#Rbb^kV{1@UF=%d#e@k8_vq<|-@@WqAxJ1^^Ep|&LaD9bdxwS5GaIXkdaK`5QCX_zD z;3aF---@J5H-nZYo(x&-{$-;T+Ro)kxLi2a+_#~h0&o?(i?aENF(;Y7)p4U-&F5?% zsX~U@7LkBGV}@~eR;GFSNUa9dT473|W^6B$GfhE&D&4r1gE|q{+X`s`R=h3e3sOA_ zda7SFN=@#8b9iC%2Szo==>tZzBrUaX6A#?hci05Rtuvu{<2f4*hQyX}KeLgfLSU6E*jL-dili6Otw6B%hE+aNVSMPH7UT_Im%Jw8^KTRQ(;_s;<}vO&jo zx3OE$p}fwI2d#3@gIPG0GYQ6+yJQVv<@Rsn_WnXM`hrT(DI2|=&u3Crx4bYX-aP>J zkG`-x_dQEs27X!CS1=Wg_MGCHEN^@oO^3Ex4ooMC zI)1i+ujwy7Bt#o{ z3xrdAV^M+1*mn58egJ&_5Y+qR0nlRr6uJKke1Y~y@Ki;uVxVR15OE|N(*xo!_Ur_g z+PgYl(e+TMyve}wUUkd7#=0js*cit%5AKnc`^3M5;Oe!sCKA~hzyyaA zzHyVUdGoAXH!`OPZJEk(i~)Tm>yyH(fQ-RCEbY7~;of^`H0VbBRwJ&rad3aFzt!sj z5dF3(LszJ35w?6++UVq%{%@Xjt~2-U_XEIoZGmY04jL829KeP4Kpc=!LwcvxJEy$} zF0L}M>zcp4isAUDx~dEL^BIE`x6|XU-sbLiKrpJHca2=mel27UNZ8sj4IC(j*X^^69<}86RPGfj3+mJr&tSQYS*dv zgh{BT!OW}v@h3r}?t4ZQ5a*YUwt0H@LnHwV+$y@GE4lA5X!i!(@{k|97Beox(9j|X z{Xj6m{nG=fxwyhxf%@3}1Fc%2trn+ZY^QGNm`an6FJ~f@X^UnnK5e&;wNm<-pnQYv z9snV^gm+P0c3lZqtJED`ZK(7%cNyE!5*3`Ia^Y4LO6L6qXtp(;L{(wX%b=dTx}C$T z;(#}I`JxLhRX_#<2>0Mdw^V>pnM-G93m*A*yubU*F0;0BD^qx^Yu;vf8ryC~0y{7M!#wg76y-?5@d=xTQ+@cDB2sJ?%xh019R3G={xN&|#}n zUCq+tr0dH+d+9nkDmz3~^u^u#&TsRldrh#SERc&>o{KZt*U;~8Ua4Oda%NxZzF+L| zfmgW8VL;m6(@aCWePOVfkP$ZNr|i>qrFcosJZ)1VOXhUwUQKn~YnO0&95*X(lsHh)j9j0g(bD-J&I&!O%jY|ApF^;(_u!8RU(AAU(%DbmHkEhc; z>7QGo$9@1jVaTiS>g1`JZkJxaGw-|iFd_JvH%zXazQjU%xsn}!t*?_JKQul_Z=>}p zpn*SqNb;EbWzcKEyh8QnAqZQN-$gyxV?;E7GG7ysLF9__^K~m zHe+VKW*8aP69&C!LFTq`Ub-A-I#EphJ^9G}M$qG~cM&?*KD~aP;pda+x#B*q4!)dr zPB~oM!SvGoYjcV1Q}wfx7GkH4zAN81>S*Q5^C~;gHniU_-i+yse4bTr@kuTBXG;-S zO+5J|kL;KgdD2HjLJz+yzY>2&0ma#_;11gQQshy0v!1(}mQx9N03gPr0>ofGF`XQ| zX@}`mrK329+|?r%5){IL>!ZElCsQ4q-Iw!B0&~E5up78&w~?~zayDETf0FMDZ92=s zIEIhsuA*-_kN`Oj9Z#eL8HuH_&xOJ5(|2@RDd~b{-Gv+3tP^%CKI;}K+MFww6y#5! 
z_0`ZDH}7vH8#^bJa@H#ZZI)ZZ-kzC_zle7}9mF>A9G58XieScTmq&sTqgQ>yEK`*i zyS({bas4{RZH+(bD6$M{{xReV=7Lv%&-SmLamHV`;6gkUz-7apH-SnJCV2+$l5~K>pkwdLQarM=3B!~b$)2Lj z!(0MBp~eS*_<8+P1{Fmqt(7Ii)M)mZ0vFyFEZ(r!R+XsbGZ%9-+!~CA8}-uz96hBqhan~m(1jrx|*RQsg)DNcKA3HOAO#^Er`bPhRe$lt0r)_?*ud4`0)O3 zE6-(d@w*W%WQ!+nC!eE}NfJx#L|VVP|H@S8A4q~xer{97W6j^oJYNgKOrJ90OjU3?p9EnSAx(dXA~e-hHz)CU!46h{l8S_xYe} ziXJT^QH@Q0PHW%of?sKqd{M8jJfs|#kZ#kjxqXi^j{u4U4acEadpp6yUaZ(LfVHPt zS3||q*t!9naQ`^wYV_wdEgEL4`i2hU;rXrRg1gR}hnO0lH7AW?JXQ2I){|$~FMB4=>Tm$UTe~yGFE6AL;4b zs!j#S{YE>S(j5go-+y!t_`e0QYErhpLf@2N3R?J+a#g5i`=$U1XFPK}`kuwZf(o5H z?7XldGR4^GRQ59W@0sq$yC{KNtY&+eN=u?&Ud^|L7iz2S$cb}wHlkB+WYESN^Zf@} z5y{I25@~#1{to~OV?I6>gUyABm%BYN*i7C3jg|6X$Mup`=k38EJ z;HS>d`|#q`Z-tfm&`~TXHK&UByHt&Tb?ZBaqXp}iq+Qva~-Mtm=W+#&q|KtwWPy7cx%ZEH@Uq6A>%P&h@=?$+`y7D(!V)D(}b->om* zN+Za9;YLqA@+7KJV#)js#C!J8)85(nt>Dm;(^vrcPKa!TTQIfgQpq{5-GE->ehujQ zZK3nG9gm-NPDJ~D&WAi^)&|vT+D#Gm#Pf}%FDD{0`<)b$zTyoF@yW%> z%op?#_B`u83@Pts(Z@!u#qv6-8`E6}M;uXDmx(w!8+;ocH-Nqd4|l%0m7NkL5xeTo zoIw??#OoZo7UX4NQ(eAs_1o^S?q`xk28XleUiC}eihT*o`AfuHgcopGMUO7KQ3`k! 
z;*d)><6P+(*;3;ZtEvCa#($R38;Z#kWEF%OQ2#EhuN()Kg7#&t;V70t9K0K5GbODv zjtsOR7A!wb2j3Bf2+w40cRugQhupvqJGoHKb#`v$4&qM|q=+P9dA}Zd*A6MKVkoIF zlK2D@2Phz+@S}JEhTQNRvJ*o`L;!{D>ad}_y_J|y%A`wI^RxTULL_-*Hzu}C(Wvg z9ur@ZF$vo*%yczxQ$ULb?8hzdw5nm%!f*sWG4k7b}fOT1~O-b-d({}C62p6H?xRwKG zF){z$;xp;$Ute0rWzm&cswjqyJ_@u2=W*eT&apyndC5oReQ*wYSQ_0>1tafUP{A`=FljBir>3&RtDEIU0<07^q;9zvGmpHeIKn)sQz{BR zAMOnQjIdxr9DAG^4`d572W#n?)X~C2w>>v}nB`$zZ@a9kLV(n0X=GGi?u+)R+ck1X z1fp}+G*zhFop|8MIes(eOs1y6UcT1%+&XwLo+9$9JJToB$+R#@ZCYoN(VEZ>CV&gL zXC1Bz33d*SEk@uCG$FU*|5YRZG~{#+{;BWZwqq*x#=FJs+_)70YL)CqxMtifLk9cpnatSJn5vx%@JDXgJ_cCp+O>2C#KRG?&+B(L5g7^CM@MUGt z2tqlo{tgK34kG!o=S|&Y`y|8q&Nw%JyqLbTeJ(Ff28>FXebGGQR9>9ZM>Wq9uKM-U z!iVJUjbb+w-Lk;M&953-aZ+>1zaXD)^rlbPvOWen&D53fFkk?5ci;K@>yd?4Y;;t2 z&NL9j1j_~>fal+x!D{AF8|KK1fr0v;`~O&k+D~%wp?`fZHt?2bwIUl)Ci1?|7@Gtm za@jmWx}V@*UzM{-w5dUh_WcIC5Hhf99<&&w-e65nl9lS&`aRWZzq z#({4R;R5!^fi}BnZ}v18Use#gYky}?pj;=qkk+4{(g)d+eYA~sW8BO7(5P){bHt_EvCEb zUMzeu3U3Wu7FlZfef?DoaY+y6QvV?zny5V2DxE~43hyEsF2RW!o?!%zRSI>xYviur zLmNIxo;`1MDoAOYwrQ?!ZH^%2Ez~jaq;Hm}$cqeOoN5>`wVl^Z>zrr}x|8tD2P7M~ z2f7k$bbOq3bjhzvhfx$s4}aWDBOrWDM}+n&t-le-gNf;(>iUxl+6HF3S#CSCQHFy? 
z3)*q58V9#H>%$ah_mnuDA*%L=&LW*2n?FqFX=Qz7r4X$y^yqYNq{N)#d2}Uvh7s75 zZLrfY?=Ra+?P>aPw(E~kbM<%B+onP429vkya7scR0V`X*_XZCDSMtHYvhns{R9Rmq z?bNN{Ptde{nc1#UzfT4c!QF7gyYq-7Z{l}urXr^ZSSjnZLcP0)Z>c=&Hliyi zwl4q8Pxsnn`bTo#(BQGGZxjjegrR`RzQ55K`dxFTo=#$eFJW*NEe=WAq%`k~SIR-N zOa(WF0q^FHs+_Yl>ynlF*@tqu*uSUa#0zwmoL$n&4c7w^Boq4?o_>SJLSvB#xUhcSet73Q@jCzTE z^^_=Ik~nY@+V?M1pfAaa)_37e0YDMpoSQI$z-QYJ=TKZq&sbfbtr$_^qaODu1y)QXqFKSiF#cJkdd0%caw{P5u34#LZM#dRe?7k!7!x27$Elh6mX7ahm3BLJVeV0R54D%CF7Z~Tj8r3l?im&Mtbxt|h zNHn4z7lL3pv{6CE0^W-e-!`1tc5IR18ISXQn;vGo+lX60mQS@j2V8zUTJ7Mxw3u-` z0zTcl^rlX14do$xOKMP87sF;D9Xh~;0FMS}KGW@+-tI^&+rS&lOX{KzdfA7NqeDLc z4Cjy9xt?ZFH(uxLIhE}Hv3~!<+6OP6lNQms;PN=$>xv3uszK58lUToRpFR|(+S;xV znHLn4{CGxp^?N+=9Nt{DGy_>y%05rbc_t*+wZ_4K^ZU8~^VG!W# zj|g`1Wnr##-7jVF5$2^Ev7>6nXN=kP6~DP19_gZ_fKQM{oqZagE1$o)L4_r&4gIxH za+r;`NBSPL((S~^oCYX& zOC#K2O35z8LV0v>G8b>^=PYaE>|vF&izOeE){3A>6zlY!&i+i2c8P8vDaF-g2zqT@ zjsSx7Zd&G};8Xw=ZTqz2Nd-$TBQKO+BH`rT2hdpzHGISIRQ5*wgfjOib z{mr8gaRVVNEB88WXjBt^-&k=~S1CfVL=mm|En)qz^gM*N8*XaJLPNE8v~L1G)(B_y zKY6CXrd!F9#=0VVGp6QDRj~nQN5fPL!RIbwLV@xu%tmS*>H!KLKpl>{DDlwkPIL@h z)AmiIVf|vciv>5XK@f7Ma=knG%%}38uhi0I!(R?l49Nz85%jLBgP&33$&O3NsTF)G z=y{Z|qs3R*r^`Qm_&hx_^FUMPB%>S`qW=>S%F ziZ8d4yS&hk1N1xrTn;brGpM11Hrngv0YE7XTb1K@{az%F``muQ8EZPp=%oGeme;@; zb>(N_u`sl;AlQTB;{?Nr!mitN$?we0B2DJ#mymVJ^V0B7Pbt?XX4Icp_j&Gfn|71@ zBK5QFD9KelkgLOsxV1)bpuEPzikl+pPl)TA=6#4-D%Lr1Hkj9Bg%_S(R)yrUL>!&( z22kZgc~?W>Bla%kg<)4dO+w>m)I3*Nw0)%h`lG=lGCl}%yuDTSDNkM#qk$?Afn`bU z-BlDsnr7b*(Yc6bM*?@?T~Wae0a_^BKhSs8>O?D_tRlkW_(tJ^^R90sE=0%mJ$@(d zxFIw2!n%VfW;LB3Axi*62DcBU+UG=Ac1Km5W+3Q z{q&3|c$yKxIn?;W(e(RP7Pl)IXR4VZX$)Tsd_@8ANKxEn>EszVL@C8sKSzwI#>obZ ze{&8lsZvAorFoIWLHO>$ae?}(v8GDzTyC<2Zyr3Gm7YMH5+% zn{pcbM8Pp5{p2+0g#2Y%5`Xijq#~Y3R8T%>gFkK+3u$`)o25f4vCO}WNGG7AAa!$@ z_M8~NG&^@|_IbXilVXCtPi(3r0A}`DS&;AVU11c;k^ia#T4;rCgU?YDq!8Nv+<=rqR_GNcz zoDa(>KaBJ3T+s6XiAft2Qx|($x3yTPwf!#^o3D|t{HVE!h|enz-XI&lyKUs)si3mc zDNIf69{`UJG&wia?+;*+^OH20+-hYM^wN(!ult#K^pxb`?M=}QE62QPO*0{Mo;UXI 
zUEVDa$YP&B7F!O3f$8b$?}SH8$|n^0uQ#oiUArPA9R`>aay?LgPPG=V;NkaA>ZdL= z-))ol{zA^>Q_I!;R3-50h7NO6D2>JQgrGr4OemCkdihp;mip`?3iB@C>l3GU&T~mR zbB`IOoHAG!I0pa;hI3@Lwiw()5uOeY67im3xBJlNEQ4}88hei!Od;+R9B^CD8I zmnbyxZ~L>G*lE?d^8_AEE&Sc|*7O{laLWp_UDk2jAYDq&F`jMaBy;@3V^4X|#>a$< zQjEyn`_Zv2p7dZKztUlEb5-P8!^vP$I1^br+Sl(OuyE9I!Z2=*!(Wt@Hl}rhu3PqH zm_J#rpi!PVn&O~GUo7`Ur{*Wm5p{44@03aFfLI1k<19&PU(`Ryke7@;D+&WIvXf=6 zwdh+6&UPVU>sd~i?OT7ZB#`8zWux?JARzR2fZEipHj1gQ{~0h2n88?{uEYqGE=uML zDEhaV-GrUuag=$3XE@J+I`nhtS7($vluBrdDHq8~0NV92U>^2l+~o$&HN5+q_Nb^i z=`m9Fb~W&au}tJY0X4PS7BH9@t&^bexU~Y3^D6ujTZ(qCw|?olEwP$Ohhet~3=h(c z!w(M3{L73ux^DEBRDT+YPNB_vHKsyeE-_s3_hz5}wOIF}+M^chWJ$4PZ%E52Ya%@+ zYFzEUo;BN!v44+p>3IO)LZ*9#(87k(@{PHa%-_2FgcIokNqw)XvOTNF?kAXQNp3Ia za+zExA?Bm6erIJ{%eqSHUh7ZKO@X~z%c&p7R^N@hiC>r~v)KeRGe*mTO?p@{Lu{Dv zq0DKYUKWi~Lq{$jJ8PH0q`1svWK>UHNVIc7(pAN8VT>o^t57UvlbPTrUTaGkhRDK` z&kb6irF0cMuwKtoI}NQiuk=9uu7g~#c3b1~9NMM}*6Kizd28|ulaG)fW=PO%Q=gAN zEgmmzhx@av$uNDOArf#`FB**hXzj$SUyFpjOV8xZG+N%js`+kv+P$9Wm)Ok6(}EoD zsan$N{gN>q#EZ5)`TIPlfe3dk8I3YKBhB~)Qzcup5Xkc3r7v?=-KTvtQ3dY%7Og#} zD^pufOUf(0%a#M>Jq4%3)?$N%cP|rMC85QOt*CYGu?{#`I_Cv1FhJRZUc@h_`?VorP@$Mv5Uz4XHEZuf&Kd0m z#vFkA_q8)^4i+<4?WqT{NmWHwJrZuQBTfR1ATjtaB`=u#LA*^66~BB*_E>l;XZ-n+ zDZOzlRa@S>Ck|p~n{_*Gns^osp3s?A*69PZ&}XmTq3 z1HdyZn@sJF5FNT+-!WN#P{Bf^egr%g1j)R_V!3CLZA-qWB-`NZWhMy-K#s1)A*4m> zW>{O)!J}^|+m~7~^*%6eSSZ*viT)HQQxyra8(?r7|NO&yPj`s$OH^p)qxgaxH$ls9 z+$ndr9t>WIQCf@=ePVwjR7vKlX!&`byO!x-=v-9OR5($4suv^lDwzDEvn9zH&d!1U z+fR`b_$529%fi&8=+yY;g~HX!gUlR8OO}{5JhWm(hPjeFv`#}dv2=Or-{{e&_)p5y zO8l&?hAM3_2rx{CdOK#{dl7aT#BbnHi7*RW`_7`>C`I*pn&Rva15QjN`CqHxO)dE{o0 zRMR%l34diSVqfo`(>fI$9Pvpl{5yM4S1(?no%HGsqtB>N_OIV*C#D(QyDz5N6=`Bc zP30>b9{|BLFlw3H7mh!!WQcF(>7Mi5k;!;geTsrRC&QD%o&~i9SJH~A-;MGXoOKAS zOK4qX&@p0c*OaflE@0%7UmsznHrpctJD`l(Y0AoaL^maqH?2>rT@Z|@fSW{ZRo!7+$jA_`+=g~;-<-r0q zEgylpYPw~1^evzV8lVmFd_Pw`8z)L$RxbX-2exmLey`+|edW27ziW0J7oOUYyGv>Od>jZ(L*x+6i!njU2)fA5Xi z`oAt0lY|Iq$vV)_vYOk?lx-xKX%Ra-8~graH_wU^Ru~@pt0bZa$~YqKhmcW)`zJ7| 
zi2QrL#{K%)K0uT$)2D=_siD^*$!- zxng!G-q6A$5t~4I9B%-?_7&!x4TAyiqS$q5<>A+^e<*~mZ;CnOt|ehyjh zoUtGuoKk1|!6sjmyOBFPfiBSajx8mhBJW;T^D$k6hl?S?9RA+pjA=FsZmf7Vwa@wG z?;3lXk}JkBxcV8kRC>V=BQ5tA?K6C8tERQo2CQ&WgkQ}#RDE}5SM$Re9U z2HW5VP9ivUQ**|i{>d#NZ8|Zx}p?PL)-=$d=f$;s^*LVe+TEzz~bqx+~`8a@i{*Y6!y znR4bUdMhMCJwg2(Fl>cqd0@n+ylR3hl9^D3 z4k--ZNBh9%!k#vr8CQ4B!O|6`M1A7o^C=D#ea|*!mb6`o?84cj z<PEZhl}1!Tcx=(6<{bw#w*n3o&CmcUW!%f6UKm5f{uwkWp}yT_#kyQ7@7zTgn-q{6ooRoytHW1Ynh2 z1QSjZN#dE?L2<^O2PzDRYioL7rxyMqw*VB%&2IW*nAKS$yZWCM_mU9Gn&A(G|87#W z4DfY3rRS`%o*;!(zU%9|=a}>~R;_dl*Yojfxe=}IlWk(Y?~mk@*v!ml6LI^#vq`<9*8M3$UW@|OB#)2teYg+$j%a1_iO zs~<(-K*d6LIwNSa()9&PJ$e4e9CJ$~_Tp*oI82ptbLcQeG1{`VdugvP%#qC|r#fUu zwlJ_kmf=)-=N$E|RkgB{Qn`-I#NJ)>c%I?6Z3`D93cqD(YVsTTU~>{o#u1z|u;Y7w zLE5G6YFC3p{>1R|z~{`kNXRZR%Y8o@=5-Bj4HLnz_q#v&NNjX#q!UG@XAGPPS+j%gGtE0)(rs+D$?e+F=D)nP zS5vzO&QrLm4?+fOUq{kyygt8gxws@vY=t9?{`1VhEJGgHsvdTued5@>LL&1djtL!Z z83|aE8$vIsE8pI>dLuQfGyV`ss9yNTUb@$vQPyuN&Mv7DWeqB1{{SBgpJ9sbueAF; zS_PI%e>kgw6op1(AH;te#?U-79+hKxr8zA%(;NQ)v=N39vFD#`DE#YdMZUMZvJ+0b zTpT!5Aa3qQwP}0xG~|8H!#g))akSIeX9M1ttjcmYH2ZDR$T8ZzsJlC!Otm^6hW;AW z{72%e4L3!(NN#1jK@74sHXQXHr}USE@@OH1L-p!&#bT0KxV*c>X*Y}AR zA&DGD2yUH9f!e+&{hIBdpW^<9X>em7Xt^+}NN@(=M_%3Q^pD0G9*zG12v>)^72(S% zEJXJLIPN2I3{c0GA-zKoez~q%uJW5OHD%Z`^dO5XXS}u#?1R;HJR|&PdZ{;_)NyDZYHyz zKXgcbY_J`MK;zoGGvPT>`7U9)d1VtxDKZ`yzXv~sVQVwc*y9sH)HTSZ)A23Umr)9Y9|cOQ!4AY7%5WKbq%jh3=y~4{}W>pgm5m_u+o0 zXW%Fr)xDk67V_9d1TaMz2=FF1zTw*k z10KS+H1@K-@W75+f&fO6=2vZ^K!laYvB0ilQ}C1?FcInpPeito7Mk{L`&xNUILPFH zImfLrsOWhI#+_01;o!djSX~pWCyJ($OVHU?Es(4T{sKCCgI(Xl7*B^MUkO-7{$_!x zl7irKb7ynIvSX<80qN4JUVJgoN1ftL15Mkg>Gzv0HaR|0FUlrxdSjo?wf-siR^sH0S7SS_&36$o&y2VXGcW41w{n_5L5+P}l=@ekpCukhEy0z1u4dt|wV z9zdep#@m6R6- zX+IGyHE$c~7UA`0)c(zOWK-qAF;L2w?i6*dKf-zsjeG_BOY5E+dF+4eAyne`f1n z4lvd98&;Q1(ykIatINP!&sS9rKzgJr$ae9dyzCIU9y0;Uzo2ZB4CnzWUPaBp+uT`%EZhIHSHllXe-!YeB&G&t`p ziW(2>@>CkSo+YE#gZ}Wo*70>hC0%5lql)V)cQL zMnE`!Q;>P|#Y9EO(a`?Ma7C~9FH-ol@`ryI>-QJ2xH)9EjQLQ;PqL1ktJXk8ZVoHK 
z{t9ZAUjn`)Yu~f9+MfM3;&ZHcM{&*~lRkVv^!Z!2PWA0@TpO6fsXI?XM^RRtj+l^m zy&xKDYZ1!+W}7{Oibg=ePrHntZFHVNZNU& z3mRm(Cl~^&&8Wj?2bj>wDb5x+`6ShKNiNsS)L0*zDF8nz`=3)u$T>=CX9XHs?#UyEo4Smv-}sHz*-lFanNy1>@e7pSnGcgICn1 zoGhxl$`Ry}cq~UPUpngF6Fwzr-x(+Hx5AGGTU^g+Vz&Ckny#SHTQsHFqh(?DSg!}v z5nj9T3*rTj!;J&`a_Si_E~S`7rD)(~?yekSIF}tpc_*-}uK;{CZv^NET=8Y>H#+9C zsEf^VMwA3u%A7=p`>4M!)`cq(JyG%Z?QQVK;%a$+8onf5+co~Lt4n2fXQ!v`Y(fu` zHTB0rdz$f0IXuYZjFY>6%g-Xd?){+yRPK*HqO&jCM}~B9$Q%PvUq!VJROYS9k;OU%&gbh))Gggk zPevW-?Cxe{+2TiIp8XA5k~~M8b@ZrUh}+z@2MjaMtypqAf=3)wOK~KX%59M*KD60b zqU3th-vzgOR8#h(5|1$uEb?{lS_v3b8SHB`HrYWVjOMiCebJNY%}j`-uOswB_7SkK zxBZrUJ@#AFm|ahCG`j{izW!AE*O&hQ!AHI*OZLql;MKFMc>rn}mecoG@euYnuV4L~ zG;1$|zp}oIeW@-Xve4wzuMbQY22b#>g#3TwjZ@+uim&`{tlT`;8jZo1be%#I@-NUX z1}hnSM}0=rG%%-?a!V1D%~qa8Dsj_{(LBoYgVb&2qPUbu_}hVwqM^3g=Cs>1yb+_% ztoV-AA+z(=8RM3C$l#)|Bz_{j)5Ll=h`d#$E}^A8rk`bhaTTVQqTO69lB5KgS=HO_ zp&*?71#v$Mybr4B9}TAQ%&f<){4Hx{f;gd( zqfPR&K-_2hoA+k9YfkRS^eEHiiJ#zo3f}X;6Y9F&sj4zhsF*a{OId*2epQq=bvVb= zt!Mbx;r*tCCG@)HwH#L(W&9EwIOW{i9Av8F_?K}k-q^0o;ZA{PbK!kcTht|!$k8w5 zpX`!jd6rR?1P(fr`BybzW#S(d>DrtUJTcnbf5gLH)F)h$v2G~}xA3tYhg?=tv8cf*|~T~gm)vC^%ichXF&s$QMZ z!7_dJ1Go$PKDEer$HU$ew$l6;plFgwb0l{qKeSvFX%a_Mf$9J_&#ijjjC4yM0tCG9 z;!S3nZQ5!2m8Vo`fmai{^eW5pk}4_3nxh&|lyAs6^IR<;=Rkq-ZZh-d@JHV7v9^v zaZRVSgkmW-7IpssR1_X}63veMX1;gvww12@M)6_ro`Gu@mkL@~-`}ZUDM<3tO|Se< znFl_A`c~C6RxzAUJ=VM*spxt(nI5fdQf)@fucicz%NaNaZ>Bm|AuXKO#K&mmqlp=q zmHBp$;`aJiqIkB#F@1HW!)|M>4ZcNX z+R~xmjD9uD?xc=>OEbOjzk;lE?+0n|%@wLM!q@jVCQ!{9rUONf?+``@a%x%aQpS7j zd8`^cMn#s$Fd=Xb)l8nO4<{W7HB-TV@Qmns9p<5{Ymh`z;6Y<)dnYAGEhDJ!+n;VL zocN{jxOk65vG8_-ZhX5tX1A85lG#)Jd>YNdHR|qxp1dsd2f#X zAHm{l{ZmuD&=Y07nU6<`)=jJCvtd#->A3Nh>@!>rh496+>nkq^S+f5CYR>YR?c^Jf z&Y_siCV3$nBo2oHzJ;~0Z6rRiJdbs7^8VmU(iI#o2W<5wxt|Ysb57GfBWk*~q@H#y zeCbAc63O?GJMc080DT&$$jYTV$nn1w_-|WSw9OYour|JNhVJ2ZeZv?CSe~Vi1fN{j zv* z+#LDmIbu&`;<5ErvxCQ47OxhT(ml-AR&q$+>d<*xkw@Z`L8O@4xzNdOEZRLb2Ung@ z&pQRiSny7MohxEI#;Xh59D+0}6AV`KT!-z{0N< 
z)=~~INHtvPaYW4`5tqPh`c>+J?Sv6Cw3uvsi~Zy2K}kCt?}{{-lF|(~VOP4k+OlK# zSP*mQ22FF?ouWyt-P~ztQzmHb*BC&*x{MG#22bOP>+dWtW{tHAnfJWPk)mz+VO;Z% zdaEv%Yk8?(BvP^)NcLO0afWiI`U(W1S+ml02eYw zef#HKzDx^*iGR8?*WA>TMs0OmfctJD$#clc;B@+bD!Lf}#%Zhr;IPgH1r}k-eicRu z=O(1LQmR-UdsJ|cBlkTik-Uok04X)^rDMZx=cInkDHfse@4=Q6Lb9v)lGAie(TAF+ z75ee3_%a*q4lNZe{{VXh*mrQesz>`i_?e$G0?ULN@C;O%~CRTk64cCtw$ zsbGp61Ci(lHTx~%uNHWB!jRkevsTpOw}$>f_IUShmf;<+g=1X11DuX4madf#PX`ra zkMV|qf23-0%WZ4sK-xSxjuZRUDK7*L_}z-_w2AGnEK+2*o;gth%N*;5Q;zk8tv`r7 zOL3+6lSlA0@1;)-)|U5L#mKWQBMrzzN9W;`jNnwh7V!^?pH|bfU0cKUO$EiT%cxr1 zzDuuIN-+D%ImzleRxW(3?ue{7rdh#{U2eHnri2{A&=2Y{T26 zo6v8!jyZrb4E4_=*mSR5)KdN{W>_tdG>2pbTa_oNBhwYg{7>-T_K(9&Pe8epJP=ti zcImx`IsE%nx%moPkoYr9i%`+v)2*GaBS_rD?$}vDCm21@56-Xne@dBrIUTjsDy`=@ zbq9s{ut()se+6$OPYLPz9<@1!TT65c9P$L)##NN}Qa>YIwrr#APiWK&NHUBkwo|1P{C`uJD(@%TET}A0K$HU6Wq%WU|K7YSXUR z;zDudK_5gxpQ)`4TGsl@!h+t~;G~v!@XE3j+!i3icOJmvx{yB9U1cg7CL5Jc13eGZ z6)Rv%Qsh>4iw@WnZ6|P3&PnGr#{6pWeZPl10egD4F$RuA63^yw5G)2b>c^m8dh12Z zb{5Zlb#WX~K^b`^mA0&E2P{uuE6kI-?wJ*Z#q^ewX!eEh%N#jiU;C`Y?jz}3^}IKq@V4RcRriDv z*7j*+SHfYh6;vz6PeUfu+=rp$cdvdH`(rUXw3%I`vFr7$tw+N;b?UwBu@KhMD7V|y zcJq<>^`UM=;(5-e@X}9+9x>7WB6w?2hTQ0PY%OMZ@NR8L{p=1ug_Vy3)K{+Qa>a9E zjI70i;kOJL<}_F12w-dssGoip;$N?UP>{Xypk-nyeYhB?6AN{Sj@ZM3y+-jKhyB5Q{G$8ACy5E#U#^P+&FJAiVbb3 z&oRKkInQI-r-7nKvmy|4%MneAYjr%f4{}9Wf_Nn1STzL@{hHS6F4GRgN%3Y8O3rs2QrW+pB6wtbnA}%p@4=j)t^a!sT*8=hGDl((YSSAG#zg zUB4 zIPb4;ZcbshjB_N7*$@t-pZ>iK+{x^6-Y)UKhrSJXGhXox-LSd5n(8ew%HAl!n^3uQ z!o?ExA#=~Lug?#T{{Rbh?}UF9Z@eY1+dNaMt=LxpFcW1JAo&? 
zRAh2jinY&`E!kQK=5dw>8LJY+NIqxGgz=u$S|Ku%3?yZF+Ixzyvo9bqDnE<2(zQ6u z`})wfiFYC3VjoFNtT1Hu^&>lWuGWBp;VG%=AY!2wrERc*?~!trx?- z2(YneEVT4wkI6X`KJ-T0*pizM{0N$(s(6-orm^u(w`CNQT58bR$!X<&>7jx!tDJr2 zZdjl8Mr*P0){vIER-LIur%2X%hLrLnvtUId%%(<&kGkOkjQcOWY1w=%hHJ>QJ54-2 zl=savBI&TmvRiJC=k7oOea&*sYI>?V97VRdd!hIzR+mePJ<@72+RQ|QBw!uNJK!ie zKg4+8n$)n;^zRJ*&ID>cD}}CnN2(W)KG0crNse$7X9N4y$LRJKR(=uj4fMB6 z$qmbSs<1jmyacV3w$vwv!@hdvYVQut&35xp_sn(tQGJ>xlPBm*3i?d@K_ z;tdro{2vE~G;3L$UB-gt(em;}04~gZPfSrwd+iQr`NzfH7qhyR*Tz0Oxx9GpZN30!Is5Ju@&3_YO8L$WW-BS~G_5;KmK!hhhh$jT6TMk- z-k&2VIO~e3?sLqys@>14YWmgBhpa86KiQVX_IDdv+>S94juBWW&Uqn!+7-cSI>v+G z?RoVbQr+(0me`vZ-8T8mpr|Yd+#9p_Rj(g-p7+AGBGoUaN$0VN4WWmU0pPt`;s&`6m!(+T&ZL%~?7(Cw>(4>xJ?c+}Uk-KOiykFD8_*e6I3sEGJGq7u z-Ua)_daxY_divYokHSBPp9Ovy>sRyZ*7NE&N|IVkyvWOt$fZZ!z{x*DT@6_U3R8Ns z$$SU!pTe47jU?AKZ7f;AYk3`>pQlLs+A=^sYd`V6;m@XVUqgH+)3tp;H7y{0`Kz;UQuQM1!jll7AZJJ`ea;PVrW^txb0+lGjWUPo*@Y4J`73v9N!Duq1se-ZY&b zN0U&~Z=6Zz06x^w<7CY1lI(ksMn4KsdY#G^S`z7=6w`E_LfcEZumW3YTIbD}jHF2! z+M}==$gLT4$?m=%-udxnTd1xl$_}ND<;UTQdnBGIn&SN&`FTqyW*@|ge|Sf+>0K+O zyi4RlfJT5SC>&z6<~CX(5)^0^r6hcSWALUtTgVcUcAfw{AFVx`a)N&fLVJ@*vJtgB(F}09uGNVTP@}OvwGG&9YH?cMhiC9@ulyvJt-kG|32-7}%BAGU zC)DKi^cWS>!=y`PZ2~~sVjncf1_Z?QWA9frI}3e63yVvrLnM)^lH81gkbeqwgz(+z z(JXQaoW>No@)xl*a}&xxVUpO#3|JyISc&K}Sw1Gw-VX>#_Goi0go${n2Q1OH=Ev1? 
zE2WMVSd{?bS07qyLL3!z3>Xz?FMKoS7p^lnZGH`_}K4kMjAK{sUWL0oyf8QF7`vvyLc{QPby-C$&n^04X*B z(P$1+KT?`YeL0y>vjOs;6wLLHhk+6nVU>9*6263AGE%<5UUyMJqr-79@Z?xM9{6A@Bq(>AM zu*aJit+Ty`DPWLTK`k#p+ms*YsXxn5cWL9IrCy&f# zsrZ)md;KxYFY-oAl1C@qmyBTl0G_nz0AfPMFhAL+M1j=BEO`g0E7qy(E@KD7wustQ zy`BA#l@T_`(ns@1r2K&W%XZOoDjY603$nt)Jots5Hwh zI_k*BZ#1GnV%j9>ki9eSjw@2$!ODgw9C9&Cy0qKSo(=)U3U?|cEB!xD)a<6xbvtC1 z*6K42#8Px+VS+22@G{7n0$ZKitevb-9=jqqAEpI#7dFyv46IHV9D9RQG|vwI0AfAh zw4Act-nGsc{ldeMPne?1L3G40m;u~(rD!7ZnmH#WTdoB-M|1`W6v-gDU^&Rm9Ky0P z{8tTvczm#diMF_gNZ1a9NT)xRYQ@drk|bywA%QZasW#x_6x32P%{1;@6(_Gu)y7@I%#efL zq_=jBw<)N1yAFn>fkA9H=S|8s(9uTPnh@M9z;n~Esg#mJk(!1DL>LMO&{U(XN_|bE zsMg#;`40x4o=Erevufs!Glb=mkk|~x8Iohxg7;`I?sf3-EYMjXNzvqM!V7>nJn=5Rr3ZJzPK2x zR`#YH+gBT#B%bD}$sm+@ju`k_KLhx7YacXDt$S-Zorf(bU>KiLdXGWHcxU_-0_G;s zJR@Rl$(Q>^c|5?m2nfd9`kMNWSF(cjRtty&s>mb?f4sbdUmX7c!Ag8>;z@jEqIeEN z7}D0u`$8ZW=WBv+Nj{kELfstrnA#`HlR&~y(KgvR+n%3FjU83Ya4r?wEQ~rDn(|^G zl5##`azGVboOx?=7jg5CdfJ}Hlv#N4m_0ozrPa9DHjIy2xfF{mIrO1n4neI7O&Ko^ z0Lbf7+37M|FcGG6S0}SpaCY~tzYkadM1m}l&$jdH)vi22sZ3tx{${wHnDe>Lf0c8$ zr*x`R^c`zT=0hZnCIHCj2Lh>FN9G;?&U)4+a+S)_qst}>XEpX$>`m~uPP+In4z+V+ zR&7gGW^&;4kD>Z|SIJ%!@b%A#HTx|J&=W27+;;K%7ho&(FT>vtbe{zHD#O8E9@E4S zX_j%q5qT`ZM)p3WwrMq?(}RPQwmfU%SB7G+@n3_y7d%rfwf3hVmIar0mWK|<9_3FS z^`ql>w4a5xS2mYg%e{kX!06?GXH*-3SMKh7vfY)E}yIDUEpTxQcils3vf*aIX z=EBH`r)XW>K_L$wwUUxWQ}AK0BI{l$@k|;5XhCfJIjlr3F1)~3&tfqcZrpS2 zUeBuEYwF2mZQ;1!62*xOmj$;eARdE}_rEInlJCVDkBYt%Z?m2@wzJla{;=Umn&#?8 z;mPZ;ebL)Hk9zu3#l9)O7+!`k zDkZzN_JIEGM?DJqs2BpeBL^mVmFg=QUlKf9eeny$*ZwoVjyMh6{{ZOLQ4f~h5yCEg zan`YP-4kB%P4Z|y99-)2LVKQv z0;ifi&q_3!=yARU@U7mB@gnz6lFmCQwM#WiXkI@uCEJ~#9y;@nO6~NG8qqZG?8(1> zc^%O5!Zrf2{vxBIiq!F6fVF$S8E=P7kgb$9l00nO0p?)vJ({*O4+HBqn)ThBS}};0 zSC9qjN}L~Wr7vj|YN0C*vD4wxEfT^Jm55_L{mLPhx!m zr=N!2FV5%DB~>59M--Jp49OcdREeSCsWXdC5(?_ zRObtB9?EAjV^DHensPuJsIBRI6?qZN^8xi3tG3?{bvKMj6|iu57=CJb)z&QePQ zC<%j%@;z!>T^q?}9^=z|UHC*BH)fchauo zlM+I}`W357plXb}n$|!)b{dXoi;`W8@PoLett9Zyq~`+N{fjOIJ$x;yIAiukY;@a4 
zGVV7{eT=F?n}!8y4+m?~kR!2hPi8eNUIpK!dYAqXEpz^PFa7mLw25BMeUHO`h#DL^Xoc(`w;`luW`(i1j=*-Vc*8nv zF2kP7T{n%M8`nwVx!z+3aM`pB_r-Htx5%K5hp(l3@RPQu&dnz)maOeemc(uAk=MO_ z)!}~-cz?(L00KS@>wX#V-IdpfG>;J4Lw{ow3bW6%DoJ2I?@X4jh3uXv-*dU)xWKQx z{{Y~azA5;5s(dKcd@JH>pDOc9)`Zb*Uz$keUZ)=7vadB5-JLi)rhRU0B$72tu^dt_ zGD90>QIAj$aoaSO@POq9VZjwad{gk(O@t+_%=yMJp!50Fcs?n3RWE$Fw9A7K`_i+F zh69;?9z12sHT- z5Ao+FvUQ_<%{`2c<88Y_5rX3!4ATXrfE$QkuIl8E#UBpx7CGPFAXDP_m7rsJw4eRb zMMt$)AyJjir|h=x!V#Zpd{&_d&U*S{xr}alyL*-EO_SoEh8A6sB+N6{AEiHS61|b9 zai&Qo2yA;ANVNFZApEzCLm0P*^yYTAGhuOS2cOxFpCNWi_`1Rq4 z=ko799;}EP0E>(o$znW89@aKZ#8l37@*<~S>Pj+GasD)X|S`|{9VvSMuh>phvMtZjc^{P6j z#194NR%Yi@@m`#W2QBu*&tAhk*Tvr#K0JIv_=%~>d*Uw?T*({AS#6e5sTlMM4l!PT zCy1uHkfO-McRRCP(TtwVT8v?WL{@1s9$M7G=uZW%%zAz@YB(SWz{I@tNBo?>UuxznFDFg zd{^PmvU;lR+(qy>vobT z`&OQW0N6US3pc2)|&=Bw!;6#!w1*Rq2yNF9BuL_oS! zN%OeQYfDV>?V;cS&w7)|NudA^KS@ z0D@xM>RuZ7gRJ~fvW*H^Sc@o(i_zTVpE-2CDYp6TSU(Y0zc!&9nb01S1VN+*wr|l?xpaPL)UyusNHE+R?j8AvvToU+TCRR zp4@`PPff)C01tlE!T!@f4Eze7JkWeYe;xDx0BlZ0jiH1*QO1ZEBM!S-Q-)vR9M!>xgW+niTD;NOJ53w|bk$WXtGb%oR7hfSIvwa=*$_IcNYWL%8$F|^kcseEtHS54M! 
zC-BXznw^f=blrLF)<(Gr@~(1z=;?~{9~^1_0JILZ@N(}!v%9mr@Rhya+w~KrMTd#l zsCwmy3CRpz6rUpAFB(Q9ivhgj5o)*$WPKDesOZlc^n8nZAP zQ4nOabX5bnCjeJZ;r{@KUMBH=qBP$OXm>Z4GMuVHgz=uaJXg^_v)}AT;?D&6e-r*K zczWX2XO(}kbm>znyIUrAVa9s}{A;>$wmC^>=&i{H@3F%@5D(~eNR!c zit_5vrr}_{R^RL0zbg8Ee*kH^XNGl4?+j@6Hu79V$!;4ec9V?YcI2E_l4)Kr@g|*U zZ7g+pQ5kUL9$0(Rr}3wW7T?MK+n6a`xZT>hB__{yl{nIhRy}*dp9!MUG>dI3PqZ>a zYivr&WyzJ#%y2pZl4;i79M$2|4fcN*(%nxP9(#ouJoe85yrv(Eo;Qj+mxEJ)%XZngo>;+bb_x$c zSvt3jd{wVVA6M4yC)xlCUkV4X=qhb4Vy?M%J(E)S+2Ly~NBcie(88{8ZY|~MOuG2b z;K)g6wRyI%`2Zx3*1Rt2?q)e{#;Q$qDdQ(SvMXyN7bcHtx%l1SE#+iMWY1Rv6|1BC zOYkPKZJ2d|YTxwVfnPjtA+xrnHxoKww;q_LqjAOEA3$CBgGq4m*?6B!xl_Xzlci_d zd{ojjw<$KM3mDsit(<-p;x@7v-HyS?son4uu(`*jLslAhJ<4B?-Wpj{+-h>ar~~Dw zuKqfBR%PE7rUp6M7Q8*~V{{;^bLmY<-GSR4M|{&!Y&nta-|)WlsN}$oM^EooSBgG9 zK)H`fwwvZ&8$AtqYurp!uFP{$G`pJ{4Lezq@dL%T%bTAOUF4kMWWW^dJK_(FwD3j0 zh^~Mqp_4UJTkx*E;pr7MeJ)r7kCEAaR{9=>s#-zjK3L+3N0)PGG}ebmJ}h|oQA6wV zFbACTPSpM{c)se-$~x@PoP5S1&2y}Z$7nqP$Gum0zJC2{HMuRli_hb46inOO#F~lS z(D}qwh4_c#V4yFHFR%s&%qtMW+p&|!y&g=z-*3*fy6j_lcRwh+L42*KJXWQdHjkOt z3{P>7N?!|U5bGCnTC#?8wrLf9m965h56f$O_i+f72^^Vn+fShNtNsM={89OukJ=BP z=8r6F8CB0*``5oIBz*bINuX+4Wz2TB_QfP(zC~ZPC!}yP1(zEZhTYI|;TR8OPd0dqv9LRIQ zHP2b8(rCIjg+zXb!>U-{T1R_rI!P3$8aG3bRE&ZtVY-Oq2ju#S^N)pb&EY?V+8&E{ zGnt~bj7S*t`9~brV{fPyP~$v~D}hGn`U+KV4NaAk<{+MxD^7};IM3FzS5R*8gU=k( zZZ#Q&q_RYxFhhfyi(jG8X-;=Ki(3Wr=7EatR2x}ajz?O{)0R^^o3_Mlk~>y&Lc|TcWYb#Mx3J~7 zwYImG^6>4BDDvd`SI(aj{8=}Mb^D2l+Zws@hI$J1PmMbBX&xVsOG&XM)PTB=?hbQa z7p%_r7t%`+Ky-N+k$e9h0YL)%SYgSZ0H?gK^ z63e#|9-S+r?C4_S6w6Xi_G`42(<(*}wOgM~d+5#8q*+Yz0nRw88jP99Eyy_>R=2{> z9j}9YS#N!)X|Uf}JA}ToopQ3oe()ZR=zCU5QE}AL+K$pZOR4BObnqEjaN&?J;c@Bd zR;Ip!>rpp$@PMy^NHuHYw}rIt3wWaX(?OBnOu1O@^tX_F-)4|*P~YtUaBC|yk&zI(pZ8@Q35ijJ_1=a!;f+t<|m7yFq)W-@)cXa}nx} zJBstTPylkggHAU-REoTxTvADz-N{%!J^T>xy!v(T#h-)nPP#Xq4yB>oVWraD+k&hP zUIUN=9zhsAtHEzwl6{8+gVw&L_Gr*2+o6n5bGx$JL*)KZYXk@4+i+#Cv4aE?#*F7%kXWtZEn5 z+HHiF_fiPsjAfXb!0@J>4}rWnr+9UmIF`=JIEZ%&H)E6e*PH&){{R-ENQZ{>4N4g| 
zaWt0l?gP|hR~m)oc6*Vl8g#T~l>Y#9mKB-fMN&bkz@z%kf%Ix#w+BVb6)<;jFQAJeTKEZ9r%X(M}X?j zD-X3dMstQwMjgksb$wk+V}f|OsUE-aBlfknxYb|6p9t}&vD=MFlM5F-@!q)`UkH3c z@m<`S{vLu_>RQ1RRjerFvze=%H@{{V|-*GP|1xo3|*D*1$FurKuMh%P+Ke1t4Y z{KVs*TB*3L5tO94k6@qT?0ROFE&LyEYoB)GQ;{d?`4+vAG4}ApGEZ5nP_H;r&L&MoXwl6KyYUR_%JlCwQn%buOl@*FF$ zl>?F1mmoEF-xR(HU-%q0r=&$S)rRAVwvxy>z;E%dHN3hGb}F~2rx__|jCS0+6q~Sr zr5Vq9&$67yCP*M+tbMe-$cQs@O{X)ZlMK>0oOGTlt($Bi!w_%6@hX(8n*C8(STPqLP^>Ig;ze9t1Eguk@6hksp^en z)q-`kw}I!Gg2fV+k+b(e9c#q=Yx_`K=(@j^q1eW49KRg~4SZb$x_zR3rJ`A1 zISV3WW77t<^lyw;8V;J;Ri2dqgt>7PC(8ii1o76oUJ?!6GoBd9N;YRr;vGBUuf?1B zJVC8PdlF9Dt?bRwRfatLtAKD0I@cMkUfK9lUT+QSzif)?G~7_zGb;~iIvTSl#>@Ez z+j}1=GIEPEV3XgPq&_D+#K&_Ay0=u6?os#}?55*QC1Z-D`Srf1MQQQF#TqAu-@@J> z)vhfpW*@)Xg=sm+$sMXc5Byv4rH-mKi%$|>Ea*6p%ahXst#fxbx=yPhbahsI{ED~& zqqni1!&Xa1A0tK%LG>A_uGCe+jHcylpH2KL{kObL4YK%e;!TWN-j^g|GZn!mNSlG? zMte3Z)@IN=MX1<+!d2ie2jA=ZM5rD)w7ss;DH+=$=OB)F_OFSd5=KOc_lHXL581=^ zviPI>J9v?Av^`MCVJyq`%Nf38kvoif;=H*&b#vFHQN}p-pVqT znx(Tq@5V+qaDNK9J+!Di&n)fF1az;Y{3Y=d;IHjp;9E_8@57e5gcs2PBpRFnnh8`L z!ys|&bH^3uUl)I6&l!9<(Z`8?6Wk3~O5Qx*58ca|VT=rxiSd(x{3p`6WUS7p$w?ks zQFjlNRfLW`qvRbuDKy5h@h*_s=Z6{YV~B*lh1U|=NJmFs?&qU)rC83;aJ-N1)d>8= z*EAWmUlhQ;Nc%Lg6E4T#I2B{VFeQb|_A*G`XPuPep!BO(3pKsus4g~!u7F&^+)2SWm(%B`g!M!e|6vzc~RC`uar*)wam%NW^_$A|Qcg5OFx^As= z0bMrTN1lY1W78+;Ua_ERc6ZafEWw0hjkV(c0I}Ado+7dF9AnfMgHZ7`)uxphSkXfK&A=r3*DBXUdlaUe4gEIeY2~@|9|0~|P!<7UjeB zKl7Nu{Y=%*?j5OK%XJ zJ)(`9%LXPzty#Qw;%;4HijpbFB_G@E?82g$$y+CU}086$y3=Z6z$n$uPa`5z-M6{VzT z*0%GY)HPt}=MqH<=Xc{-RG%_6SC2wFh@y*5lG!8N@-QmrwNtjWc%{iq@zfgI*0h~3 z%n|9#;w_m~cIj2dn{1IGEJj8+Jt`K4?l;h^Vsg14fNCbOj4n5jbKG{Vh^_5}U>}?P zqt>cjLv81kRN(ibGSo?EXTYX0wXSk8z{O7~QTI8>>M5_LTgh;)#DEVx@lUvcOq(Dq z2dybR3VI^$h>D=ft@G{1Df*SH7S@Xgo4Ag8`kHo~6mdkL52-zBJx|DY#AKeCsJ+d1 zrc*Y^!y&U-@-d3!x0S~37PPjm&zC=)XKJjr@K1W@0w`V1I{>8Cuv(b(JR9Rrh-KEi zYjF@PiEO@9c^v?*QX5p$Ftdf@9CsH%ssoiSB?PeP}0|eoS zJZ8F)#aX_49G{LRYabEZv3;o4>w#|8FzNn9T)ww?rA1Ik&o$5Znkx&f+ZIwARcD^_Gu{O8i{{Rllabf0L+uA*q!7cL+Nk;k%;;;B3HjBg1$s_0H 
zY<)9aSBa*y8hyWp^viRJKr7BjJ%xHsm!vcrEH>OO$VV8)4k~39*{v7M%eD<2)SCpr zAFe8E%Qb`+VsZx@*Ac5~7q%Ch;FzfrZ~^OG4u#^#yh5hVRa{OF)|Tip=tjJHncT)2@~*rTa;f%NG7w-dodeT>Dkc zbK&oeJSxyWyWxBLSZ+`5Ww^CuS2^vP`XAzd$0+=7sOcKJ_%}@&rj2Hh!>ot`PdHz@ z=7r&zdpBC=b^id`2jXtO96Bbct+e*Mm5xPL5uE3Yjw_?uD;$c8*z#`;XvuSSv1~c% zz^`BUXYj^r*#*SGwy67scs0}b55*oT@uaZn-XVe=R@i<20Jqz$g#rHSa6$AruVnDY z!>+d&}>j%)s!4((ANc8t-Bo*s95riM?sfJv=RA502t|t!hqmbP-#h1Cjk1_ML6hj)RKy_HLiSV z;@=YLfi({i>Jtec=JPKcp{t7oW2uoEY4 z25HT{gHKxYn+NbaJ&0^SUif%`ljlD8!qp;AeW^e5{ zLW9wi3c~H0sG?0Sc=a+FoGYYwZ2FJuPq^P0SvLJLdeji(Pm)OE7<}`OLrl8VQNn_G z70WkvXq}PJd_3`l{t)psro97AAK9l@a{>8?&mdRXJ`Me)^!vN(Tg!c1%@xF9)sGnV z`h&%MFJ*or48U`arnWRc9bajS6Gd=Ai~v`#AJ(}mQB@b2-G{ECtk2ZH4g7V}H46x| z*!4Si1q-(I1Twc9NbkpL@}G*{3Gn@fsdK6La{krrnevwC*l5*&BmwMB2eo{^@Ymy3 zvnxP}JG6>6mR>W*TKE3|3v1ST?u4Fg!ZS>YTsX__&+@J+G^Iwar*q}08 z;Za2lib+w|(AK`IZkE?Z5rKkp$9&aB!A>i;n(Vv9Qm;)eVABAm8Cz~Ih?SR-P zapxzR$kii?OXLN{PfXWT8?oV4m%hwtzR)7wmdSnt9x+Q42CwLnrPZ+HByO!ZF&wxiF zlR(v8pNinnzQ(DLM77gTgwS>bEh^dKfZ{5PQ|>w5tQ* zv#n`3WX@eweszHisi$8EZDk}pd)Jzo^UHQ^2Nf%wtj!2qFX`HVDx z*0IxExi;g>^yK1}*H98?K@Y-zh|_3VmBT@%YnG-gG0H`F zSQV2Uff)wA*3-Tv>lgNq6^-b)S)wK#{{SveY^msWlV6jTm+ua-1g{WqvI4430X6k+ z!Y_~djDpVJR@LQK4t6s(b^!V0_v&jJ^xD-O6lR*S-hS1Z4bQ>zq&k7Jg_PaM3sWL-iHWd`Z#tuZvo@I)h^->yAqr=P_(m-2txhUQ;AY{IS;yizPM)M&wl=Tf zSXZ$9k}cTpz%}%CvGesW%v91$>n@{lI9-4P8)KlWFly6X>5r;gACyAK6po}-duXDA zL!IR=H@W!<^*se&(zN)Y)6KQ9Dy0c4j-=LYbRj!Coww|#ajEFq7P&5uaXb>-gmRId z2G%^bdvjj7;fvi%K+rWm5X_On(&v1naG2*K*OOj5@B_qW!~Q6J4(S)k)GdC_+sE-| zBL}xn%DsD5ySBOT)VgeQhO&xqfNtV>&n1U{Epv~pa7r#Sx;-j$O&ne2{l&a-T?r-^ zchfYA>OTJC)ce=7_;&tHI?~?S{@{z4ZDRxF=0XNeeJjr)i%PfDQ%<-~#7K=x-aSj2uUP0^cUR&_y%TvC1wM|jd2=2q9L7bU7bL@HPUbkUrVbkM|5>!Tj zASWDG9Hi0TMLXDZ`m}9wntVZg$xaZSm>p|pM1tKVkNZajaq}u16ZEcO1-yFHR`C~> z;3*^lkh%A*Xe@4K@fzQ4cPR)+*_ebtdW;VAH*={wv!T0++DY5&@sSts206#IRgw@! 
z-^k*uEKU#1rkvKY`95J&fNE2@zsAbZ+cK{b5M~#_0pqb;PXEKy>q+^=NTIRMyk+AYglh10!*X{yo7wj9Aw3LJBcAC|V zfW}WwqOrU|5q&oH1Sc}VARmgFx^1FrLqp`Pb5lBHl$TqHb|;?Xl4_@ir$&WkbC+|L z$?H*go!)yWqm1O4KX^T=9Tbr&qaFd{+Pw&*-H$3(O_shRN}pi2nFdT}B|)rhHtOE) z+{+7OcjKjKcGvw` zBKGE>)C7+!y2x?qP{AzgCgdHt_pKaOhDujwQDtVb$gYE@Qbln3xF_z&>7k-9{6#ESTa59)=Ju+V zcb7U%?00s1ONJjScNI@vyOo1O)Dh6rtu7VdMv&!#fn4o1sjJZLV7iTG8iY--OOj*k z2^`|O-+*5a^$&_48FWno&Ln6gzd?5LkCHfFD9%CajE~N^7$rA$u*QUwj+O48vtF9| z1>K&lqQ`J9?ydsKbsfK$AjUF|r?y539<GaC+B@c+*re z+q|*9a5%2R##cs0^2>GQ&MVITB1dy=rWvi``7!|@_dTns3idcFoF1(DuZeY4(R7Px zC!TpWMG_2_CccpP75fwTbHi3p_>1CIqG}qWVa212#}Ba`&1nA09{}`k4Cuq+KD`o8 zt6q=X2^0}-zTW=z?l#dfxFik_YSqh28BxL~?@0Lv_T3R_zYz5Q01fH-TH4!MX`z)? zJ)pZWB#%*ySB*z;G)s^%o`f3vgZ7^NpCb6r;f2)nkg@AJauaJW{PjY4jQaYIdiak} z(r7Y-HvnQ;aQgN!gpg>TPY_`%$`|vZl~U>T|_s>XAHIU~p9OGgrJjcy`R- zubUu}>N|xfNL>DP*LXX`ca~T1O0AG2DABhDoNpttom(p(X!xS?JN-p98(5Jo z?yb^T0-XG$btlyIst}eSZc;PrQFs~V)qWf4H=bOqA-34kN-%H@T8<`0+CjiIm8~PO zJH;`4l1B!Z(4M%fO2027^q|Hu{xx7y2*jFQhqzV@oFk>9CTIN!9R^OwKA;j zO`kGe-dx8AmZu;MxU75qUf$9dzPVCFV*ym-lUMaifepFI9@wlMZrEztRn3%5vO^x_ zVf;81+Ml|Q2BX!94gIvNBocDNKT4;o-3ftE+yVO4JtiB7^!TBgP-KohkI!5l4M{z- z2ElA|>(Z*5(^H-3#DtW9W1MhBMKpWlGNCyDXO7+J#hwWA96m-pDkt;4$n30l1ScZ3 z)JI3)-xjo%mpUwWThd4f*c%{Yy+ff~wd0RAP~KNY?merP(c_E5kh@w$eAzNfH(VO& zd=+lXY3DHI@=Xp}6&tp192kLagl3|Ld^f8$GO`{pJ5i`z810BwBLlTE(sL!0PERUG zB#PwK$mn|;_L@+;w~tVC1|^h!W3_d<2CVkCvAVGc2qkmD_O35V)k;{snUgBc9aqrw ztxGH7Fx&o^s&7J|)LHb`fPN>#CWQ7+c@ZlY?|kU9h3SBEpT@2DANvO5TQj0Ta2nt? 
zWjWdI2sy6=@Yjr=LzSio8JP-zO?ofDzY@oJt6khqaFcApD&zydCmxie8#|+}H64$d zd`~@Oav0SrEfNwx{4uQMYZO)|6bLB89I}u*Z z+2UMwvWal34YS~6WC2$#E~1Aq%Ahi+`WmE&p+vjMVsfAkl@-L^L`Yct%uZ;2_JDML z9lFvkwVN@1enWX0OoC0jk(h4zyK!Ez@k7KKANFK+nl9DzTzPOt{{So~xWFF07n<-$ z=iJEQ{^Mr4=_9_?VqHkP#}Fb&6`Lb^?K^Yy6`Yl{H-w(YOW=J=RJqmeE}~ha@;6B- z2RZBsuS)P|hVHd5J5}*ie=Jd#gK}htBRM1JKMLl&CE1;xu}1*CqM z^#Zz|1ZsMOwoPmOuBytCNcXZS+=FmXyM1ehv|49XEAu0{_-&%wYBuoM!jj0|X=5a# z{YW4;9^JUE(_TxoJj*6!rI({#@d*}UXJ@wLc+#~y#b z&)&s)9fqGBo|uYxl)}w4%zkGhuhzNRyFGeVNXgSQpDR;_$~E6Tv|luZ@<{GUuAasj z8dbDHzGD%_;nuvi>;4iwe^k_EvwM}dNyb(K^OaxVB;y0UdPjgXyGb<2^(2ATN!t>J zZ1O$NwkjL2>0a#U)599FF(v%aKKGU~2em^iZ569T`<2xE%EOxIv>V&4E(7*Uor4)< zBQ++i6_%nKQISWU{KjOkYK1J{J(`}T1qYM1e=aCUt#=s{g`y0iSoCJ{55e5I<3nCFeeB=2lYA6YWZ{G zPr{EG{4due()>xI%8`WzJD@Whnd}d(Z3`SYi0O25UNXE%tplm&wPyH!@f%9Gg?{r& z82(jjS45uJ#K)bd0~Lp%ne6A9CvF^ib6u3@Z5igy61igH9n%(P#(PvHe2J8T4o_;h z*8XdUEsRq|h-ttCaoV=Ox=L4>r(tlZq)1(e2im;%#f=U2uWP48Ak0V2lyv7jS8c1x zwmK`5#v34eSD)O>*E+mf)g+}OSbQcfqjS3Dw;tWiEsIEGMNTu*-lDdE>Nkr3 zZcB5T^bI4yejm2*HQuix`InIqmsDzb%fsGK#s?O=9|&h$q%=T+Ahy$YkzvdYt=K)SfFaZMKMQ!1b(u18cT7dT?17 zF~Q{2@<(kw^D#gNsO?;BzUOo~cP-y|j#e2h#yXz$&T87W*a+|h?GvfVLI1#QTI0K)pVp!?#Q)lE%g5Yh29^vOcjqxh>gASGn(kdtU%{(0IqM~1fuWYkA@{t{{S$AQghXTIj*zI zZkYXQB`13w)a_)7q2%ARV`x3==HJ^};7%=M_^aV^vJbkQ2oth z_}9W3XOBEDdEniCA2P#GkITAT;Fb3G9gnR|Ht(s8T56;$es04fDxP>Al-qOX+(;PU zE-TP~X#W6$?eIgz_Sz+-<6P5)r ztU-AySjPO46aX__4w#ab%oLXwS- zNwkfv;8^jD4r-;_#@fPLw@Fv!9;US9F`GBIIpeia)2v3Ts@q;Gf#k8u`qweLcS9l* z>DJdVI+aO#`qpe0O}MFE{mpLp&D9bpWBx3Y?_A_A7EQkOn||s=RvEuouPisIoj984Ya=nplbO@g|2KI8aXe~2GD#Kg-vR#sYhM#-ULGD5@ucCN5C)&CNV&#O5lPP~dIMO_l-s&J zDm9?hnd-hf_&=@qO5QvBLNBbPB*k?PQa#tTa;XynV`b!auS3#)KKLufx}+K$a|O6g zXS9^=UAu6@J-OspGp%WsQC$oDJ(xi_IYZVb1cIm6)E`RD3Tk?qT{of4Tq`FzAX7tu z)APk>Y8HY`q#l)48(-c8#sT)OkFsZbOk)GLilO2y4#&iLJ=TM%LLT1k7KTtq-3&3f zXSnNIV^2(HetjxAtQbeNWb4%8q^Tt0&x8K}XfK7D_rhNh+vvKDxsu-WyVzYPM~zNC zVf-e%{>l9Fa;w#%+N0jTM7(M6!^0mDw5HPhU!^VAl#I60lZK0M!76)K#=jo_0A_Cx 
ze$UtAR=L$;ywk7dk!)pOm4*l1BfbYyT{NZ9o+WxN_dJrtE={!4%V=YaT)c5P96r;7 z`5KL~WW^?(b$E|`r(8Z^OXg^XdKLhaPb|J*71jDt{;~oRXjU zXBljLDkepZyu>}ZsUf(IOL=YOQHdgC{Oh7ne7XrcoctG;nw|PvtY8@cz|Y)YTJAJ` zme7de;X%Ont_whRZE@yNgpcG>J-DvJPq}f0LNaTSy#hZGe(vF!ijKHA_p2Tris|f{ zY10Z@9guujBKlwR?BCh6}ztseK}yZf*reoBvulQwmL6oVzrzMpoMJnSM=qT z*ePN$)E@Ol-Pum%&PepDmJ_UU4>PGf>LT6BO6b^$7~lp!Qft&c5cyg)jC07Re7Di4 z$8op{^E(11nG|F13iU6CHZtlyF7Vfdp-+5E!z{zi$JbG7WZxz3cJX3RL1chyMi=Qo7zvaAueeC;IZ&!2DrBx(!KM%C129Z6* z{kh*E+QEkmd!KX7blP-rZBP&ZI(;gpn`?0Bw(+Rg2V7tiTef<0F3@7=Ij&lX-0h_$ zYuJfHmMXca)^f0fa7pQ!zYdQyhjeig2*w3W-XBjXq%HSH-K?y2-sr}Y=TpUJT)fD{ zsO#Sq(MzHj6L^y&+OjV#ok;<5LF8>SsK;N&8>vfm-326No-eXBvR^3X1Ey0 z#?e`q+C_nL6U2_Ef~+zPH6)3gB-V$_UMBd>sQ9Z#zwlMGLT+J-O1#)J^kyZ6)C75Z1A=D?wOtNBjPuQJacJp;x8KA>RM@n zb~8@%GozqP0&)NtJ-MX)qI+{8_>$`P`x;XA_R{5NF&?SB zADDVpDoM*jsl`SXk@FYCPl3K8@MWF0vwt_4ZGKzq?t_tzyN6?5b8%{Ibx^%QJ!|M) zOU1f}isFaBI@X6RyXlsWk;!p3*;Vn!2k&FCuMO7i<&RT;wC8%rLlQgpuK26O^D1ie zA&w%Wnp9AdWr6G5RI85O^!3o({IMVV|^W89vxvwS=2!=~l6S!PcxHiHQL8!RcO?s@_L$;mtQo z5_2OvxE{TYaQ+pu0yu{lQJ$S^dr_QSYV81WRyda)fcjLqHfgMwB9_L*?O94EOdZh6>%0e>cQ~#; zR`D2M@w~kct!e6-sYsd-*(SJ;6WlGjGpk^l6jLUwnni2PN)di9C4I`hReOWQTljzC z_%wSRds4XDm~_j56CMF}QU~*{SlzVYlK|vn=~LgvG>-~RZpriN18ZlGls8|>x{}cI zoSK%W*k7@xsUErT3rlHb+}4*9+x*>vw*&I8OKVt|@DJf$JO2O#@VH%H;Dz>{W#-FT zb=ppF7-qfcbjX0mKb2uOsp?Wvoix#hacoB*aaCoEfq*NcpHI)-ff{XV+V*{<8cWg%%HQ`K^(@vc`CX$|gH z@tYUDNYnroKdoR0gG?BopsimLUZuvTERnWXJXH&Dtsp1YoYpcoj^wtnNDfFCt;;E8 z({!7Q(7(g5VEX?6`s$sfv5|oTtzPjKscWa{M?sGsbk^m@;gAk+0iuqwG^)~xqoAv| zgEd)Q8E$N~IMqo!OB{>C52bZ_9p(MUhA!>kycZF)CM&p^R6I%9!9RI(UVY$werYu8 zoia&ThuNWqM`N7qV%hrkuFK)Jt8VaWI%cJ7xW^kwYjB4KG29XSL)y1*T}g}6LrcM4 zKk*lZuOif%>CnUS7govIKJI?C?fwS%qj%y>JIZO+OPOX%h>-ldvB6yWV3G}d@2Or| zLw$Dkj-_Xlatv{dR{sFPzZhL;{w21%`$7HFf~UIk(0Wpmg^g&(*k~?XO0kHL_ z7pa`0oLTvo@q^)}t?-w}m!1*Tr-DaT^A>0`vOq^{j%${|-G?J1y?tr>QhY7A_}-eo z#9boMZLK^y3R+AC4bjL9I&uYknIf3Z*IseWc2kkyV(B=}=!?#XI70sbH9o>Q^$YaV z?BHo8VU>*rN$0gkB0#)^=RTFK@ONDP!T7zSUB|aFPvijOAa3Kmb@Q!Gbe+ypI8Cmt 
zbju#lW+VaF_x)>T%IGwKP#m6wR>#J_1Zo}!_@k_Ne^7uVvE7+=rZ84doDq)19MwGz z$}tK~$TRC)5R|!@(-gU`bsh*q413s;PDVYco-WiDJ$yhLaPn6JJw0koCRc(e#Aj(9 z)k9jgI*z>ZzH)E?9fm1M+9F=_MZGrYN&=2??^<@&tnRGK!8pk^M@Q1cQUC$tBZ}y4 zEn|%r*Mk~PO&pl9;UT2i*>O*rYP`Qzf7 z*kER~hZv2<2=B)=P7AAJDy6gJHW{*e)?Am3KBC4zN)hswH0yYpI85m03n%6)>Z!uf zX`eqMO6GOn81oM3+@0OMt6xePiOj8%>5-34wa(jKEyOI3z;+oe(~8#9A-b91n%-bG zhVw~0Z5^u_M3Zw!vy$TNMlrMlf_qm_rQ4*pEpZU!sS&6@#gzm4n!?kdkR`^TfbCc} zC?1%ukHI$TL^n5#%CZ3<3&{)X^{k%8>totm!TODy-W|~|?gGy7CE7_4+EW8120rs= zitDZ{{=?zebgR~oLb9{6Df^*c2cJ;Is|&)~*`LO`cZs}AlK%i_X%_K$YN|H?Qz|{! z_0OoNe0Ag5ZuC2CH&2>a=6j}-QI<5xSh7x6*DUqs)Q2lQ3Qahj?t|j1KMd%X-Xznl zmgeVGkv>#$=32l86&L-ct<(3n1Fm2Dss|+d*VJAF@IAe?!dmIJYbM6c#G+0CCm?^cYnF05 z=*7vGbbk&k5F$>+f{gCRR;_CtHql@A?AgzybP#wmRJcv{`h1QDQmfLWUktUpraPM{ z3wIus%PFRMk)>p2*)(=C2eybX&JOX#Tl2K(GUmWB{v=NIcm5I6fwDHkbzJtT+rz@& zY|&*|dSm?aT)fS*c&i-M`O&Xgqqc#ebsu#902O5a0A$!Re*QpkdI8?M3#}z2GEZL*2I2noCo70J*tLls|m6~=YYebu6x(XzB`6j*RHPT zkTV(B4T*qAHR*l`__^XAhx%Rr0K>>H?sYqx$aZPS>;A7jRFm4g+r;PXNiE&Xk|h3V z62QkHTRhl;c!mYrB8s}luf`F{p&pm5<<0=uaTf{)Xvs3!lB|HSkfl243bIO`0nWkyad*{Ry zoYuF7ES`4UROjnhmNqPK8}P2v!Ip@)J5R8z8j3c8Y-R1c+uR|?)Yj}M*KK8J5ANcN zc?bLEpw%Ezc%&~a(zP^eVHguh#s_-msFOE}XHDS*M4cleqXCb`rm!UHH%{E2JmAzm z7h^815&?i)y-#Kdy}4x_W;p!o1ED(`x^={B7#RTj*F&b=yw>Cc^{!(@(=YF)0yx`f z?gv`zY_z4bwgth?IpU&LXGJ#AnXIEk46L>1el@>#xMbbmfnI^)X@G&4ae#4NQ{r2f zn*H+J1I0L*)prv}b0ylVlZHIiuN>Yamrz+iP9TVbo|!z=e+|Ua>IlOaR>`IDt&QET zs0_+6Bueo*oU17V7(bZDs5Q|IS?5%DX!>LR35k1nx?hU4DIYMIqPbNE?w`7P*VSea zI3~U>{{Vt*_>yZ6j5l5omOv9%vS`#3-Ec?pue3Zxt!Wws!dl!Ul1}5~wSZbl#xsCnS3s28la17#w{okyL4&G}@9m z`SjAtWL3|#cn|y(14z?i@P~yiwKzlCPpunky~l{W#2_&~pD*iQQ5_KAow?0;C;Sv| z!~TElsjuokV0M>JyR^8qbUdqhfq5OqGg?xs@7Ro0IJ#}0oLZf&)|0ATX_snNNG={S z2**S3MVXPD0C?}3wc|Z<4-@Lku3OnXyH9T)+9P(!^8$rt{BfVgt7*XbgC{(6uT?!! 
z^Ampkk6-GZ$=7b3Oz<^r;U=@DQ8j2#NGSirleF+bkn($dx>rxkN3$G`&)Hkp0q=5 zmm`18$@Qz+ex71nyB(+2xup9Y3qt0L5NTACjiRuACkx#_UbwipwU!VqgFqxgGNZWl z6}zI`TmJxPT+Af>-Kr=D)rsc0xiv=kho(#5ZBG6*xffC$9{PFk!*Tho*q`8Tp!PJW zwvml_^ey;CLt&x`*yZIxBV*pWFAeyJSjRiY_<3O|ll-fYOPif34xOmR5#vy}9WXk6 z7^nDuRF?k$Q2Qif3(00QIY_8VEgh}w^UI@J+?doim{hp_EcC4}09#*bUM;ZGZJCtK zcN%X_qvjQls@#2>!*JWgcQRZC1a&|CYV{Aq8X&Z=hTP0^97+!z@G9k{t+W#?mr)6kSQbpfNmvo(oKZO> zdJ&0g{{X|jAMp*h+dMm|8!NrJlEUNelm5$BJ#(gNR#JIaR;;bp+a98~QpQcZMnMPC zqA*uKT6cpba}1u za*=-MzS6_7#Z{jc@rPlPoCT`(=zP5nR&M}l8fSrK@P3(Qs|#B=`FfWlJBd7=eQWbi z;&+Fo@W+j{T@F+V)|!No;YJ1l;=fmHwE3<{nq=6dfwcSA=a>8xE5qsG@7iZtNTMh& zEp6^VBOJ1GkIK4m)vq*pxY|+Wx#6&=cH577!PO$3O?FtV-IcA%x-y&`=Og)7MPh;6 zN1lD_9@%7Xh!?Rq%uRP2?vX>+UEL5(IlCMH!3dG z{VMK;r3j3k0p}*EEwVyNtXLzF^sP-171=;ubIm0b^+vGK>TFpfCE9e@+~TJDG-C0l zVaYve3#+fN$rCa4$i*_pQ)%uCJ}_Ibt~$+L#?+R^)|&z-XB|UvS{8)w1(cDV=CdvC z3dR6GrE6${8_Cr?%n+oLT=905j;Tl8Cc3ygQW)gw*kuQ;eWm+2_+D=d{2J1%rnXp` zaMR^_uw%hLm3&K}+e@u@rYkrY5YwtJ9CC4Aq@EDaENy%@Wv2%YmiDO{GASd0fK}&e8{O1mJviSHxseC{vOrT_%}?r6E~X|$}P+CI#wN? 
zpEbF}kQpzQY#7t-op zn$_^?K^CxyGPji!$gU85)X2?Iw7-E1(B`!b|y6iXTV!17Rf6{L8Z;}!MhjjS8HZ5joDubv2wRyixjUYYg9cy*^F&!WR9*;wf`j}!Qc6gJ6t z>l+Mg?NUW$sa!iw+uuIb&}p6=v{+Rpw8;YmZmm^-*xH+DAVftuXJOENYn?Wk=r*Ep zFQ#g2a)`=5-KKwJ>T>*y93N6EE+5+C;GEwZVE7TGYEP?c^ULLDOiwl#-@29^$9}@S z8ViG{YX?t;*lbP0W@Y=h=tW+;v`l9iY;kjFFBD|Rr#Pxh;rsjIR@&9%a57a#Z(8(S zL&J$Guv6Qd3e2+7?{tgAxVC8&uct#v$ylqS&Gn5M<~LYwVPqiVYYu6V=tamIxc+tQ z^JyApxo95t&NDAkO82T!=yq|Tw4TB+tZ>h_n$MY}TD6ZFlfn1vF?F_P2cgFmG(Hr# zVj`9=m=2ifUdc9rYvqNEE&%yLt#f+Tm@O{%hTWcc6|Xp&xxig~Khz#XSYThAbtf3C z?Nh-QmTYaMjhTZ3x-rKU>pBIswd3MMBLkYJ;=LD6(15ztwRkO}nnUH8Xx>M#B94ii>XTP<43;Rg?81a|E zeR2lA)h->jGeLG7m}QGNUoD3j$8SpXu(MM~my4+2v?uuMYoU0e;Oo8!((Ux5wYmNL zsd7uq?~)ErPfGJi+F4?eBpG#+4bM~D)jRE1Um9cScgHDk>nQU9$j3@FyNPFy6Y%*v zz3bPEQb&zCu2UH#LwV!$rLm6}E0Nx(cmp{+)b@6PTY2qL_bqI5e-$jn)~OTi1%;}D zYm;p*bec2e;8%U)dtKU`C6hn712vkOL{k&TZY!R0)Ja{Lmi7%CNC@lKis`%|5R%|T z7wL+m+Hh4=*}{$uX!v43ChO(u4CmmmiucNQn$q0U9(A7%` znhTqc59+dF8Ki0LrqA7fPob;Yb+K>o(3zflwRkbJ!E=M z?0ey>j}ZRR8V`nSwN|j6=6O*r!tP|k6Os8>=|6@39%+6Rfy!BZzfzS!p61|hXCPyN z(MQt1H-BUuV@L3h?Hl1w5_p1Viq_uziDX9#6fzN#Phr--Q2agdkM?ZSB@spAyDPi1 zmyzPs6^gHOj>;>O5~Uel$E|{KFrWn;s^!IuSKK@y8~u?` zd_4W9z6ktB8okY(=A10-?u>eTk9?wAm=uuLETsIZTZJ8PGh0_b8UFxhn>#~wbFS%; z!^Y`kyif*D9Ok^axu>%|15GKI); zr*Q|f6QGK2UMIjW&isjqdJ)a7*Ax%iVLEC2-m00`)M)Y^0{T{^?NUDIbO^wY9&QFjXB}wS5IYg l;~~|QHOzZgjkqTjCZ8K1#@u>} zvv;FBXU%2EW5qfhO!^4+u@@|)C@^M}-s!glvT8jOe z@<`KnK;0{((Cu%$h~bvu+~b|hNG7>KRJuAly-s~E!Iw9PcFR0OG5X`5!o2NcnoUwG z$(Z>qo!h$dabDZ-qU%L}87F`>91l75CY1|a87y*bjf9cWSF1pM5d;3IW7ydyjtAABTR~ zqH2>zsM|bJfIwDL*FC+fiiT;J?Jm+B&{g4vndi~zj&F$(*=!WUjNOk`J!L1tp z+Dli}2;csz91z`!`hQC1JU8&39~ECr+RmgPO+5XZYDdJ94y=Dln@s(s{4Or|zEsk$1(EZ^ znJq<q;5}Ly zkhQnk;ccOYRN;m)J!_)~y)}@5jG}-YC5I#cySGdo}cF zr`t_(BA(I}UT2N7k(M~%eQVl&3;zJYH@{^sgI^J!#QrMrJ+7v|Yu}@PY+s3Dg$Wr9 zfp)na27a~W;(59BJxX+??9`F@k$>V_EgJ2#txe1~agUxCQV9CzrB9}KsV*6Qz*sTP z@()o~KWz#0{{RyH*q;!!3n=6JIPo>aR_zjiqBO#&P(OzQioo!#pn@>r@G-*?+OndX zo!Ok_GeiPF)y*?cy;e$q6#uh<8 
z%D{PJIVa!qugM>S(%xuZB=GjLBux#i)}~RW#(c<7e839(OUA#o=fiDw(?`=hHSrqx zH6#1IMmDlwwq-|R0mve{aTA}Bi<#R?NRuPZWpA7E^{w{PBo=Y# z%VdHDQ1??J{Kbg-zgoFx;Vno)zuJ0|kKQg5cH#*a?py|;nCu=JR(wM)6bc_Q9PIwDWpdu@*jGRHZ7Tb_9W?Qksl5-n)BXy>@OMMfq|~f@ zAL0#8Xr1bMSzz3=CUo`w#{l9-`kJ|~E+^_|-)ZS>iWxbK?y@+}G`4L6ooasopTHkzY*ZKm1J9kukt`G@7&Gy3+ds%4ZW zwPn)BR$3$UlGpwUgZnW}ZEJZw=CQg}0`AppK4?iDbJ2x)_r(wRDxQwIqyyn^gYIXw zjq`K%;1#7@{{RU0uZ-<<%WGlg!wyG(pOaDENof-Kk-!v_!4;fxY%R5B-`X`^=jhkN z{{Z+Y=j^xQ3n&xBlIt1`-v!P0`8-E{?!H~M zx)e>l$z#yh=FO~^lE4Zn$8Kt*_C6p1r?-Mlq;fZWw0~gXuXQG9vntlLkFY#h{{RJK z@nzH!+4wu*b(WqTaF3 z{{U#O4oEJ);UcrVC3cm&v;|4^M+13Hm!7^f!m>}EIdDNF$HBpPdRRtYXgPp6i3#v5?>L(P2vE^KsUc{ z8eHA2#nsj0ylt4|l2XM^dwN$x;(v?361->N`(FomuUfmmw6X+Q$8x*8piej_oYzHl z;D@*ch2^{>xiSip%i-0$!xsAV8-eF7)YYsV1n*>G-Dz&_d1RK_waW=E7FXJ#S90_f zHJydxsgT6L1Iab(R=*9P^GLqdtpfJL1DdVkKM3g>ewT2NY7t2LK2$;1@uAU4XvC?_ zZh6co+4;KntI!>TBehbOFv`)n`>l$a8=xcuJ?n<1=8TKQx__Sqwd${S2OX<3Oo5(k z5yxuu8;=dz>V66PO}v22t(~X0dgSb^3==4jVTt4e?^sSP4R#$pfJ7Jvq3u!W7DX=u z5ypFp*lS?Wh0hqK>DEdvRI_k;*Dk4>#Nxgr>e{WRi1ygZ(W={~4{YMAXnKY8{N7Gn zp60s`ih4D?#@<*HJ7U2&>T8zMboWOEh6e(iz3s`olRY=!%q=#j9ms#UcVp?zdv&9k zAZv*R11aMj4S3hVi%%wP`}hh^BE3f2RFz^dl1h>CoYysL-5!PgHMR9keK#P7OX=W`C92f;g=GQqirW5}`N^^K)A{MZIEY7dY(8)^k}}ul9wV)bL7jH^Vb2 zIqkus?^3vHUn=5xq=>Fc$va@@@#eEFuJ2`KhT1Sx9Ftb%((ZLBHcvJp3NWL&tB_F*27o4M=fn{jiIrV>seY9PjPmGo?IT4 z)M_7Wni*`@B^od)TvJ8Gwyb%SdR#Ec#C+$!YS_~B`5I``J4+l2y?3OnGLY-e@4f3+ zG^<69!rC56R70CGC#(K|>JXw3<-EuV- zch0g;v+kA(6p^1?X1lK$+Y7sPm6vmw4oA0Y^Vw~fPF0hP{#0s`FqD@u9+@FoF(=_y z&W$bOeYVJD$5r*HqqUAT+!yJZ*6{q&MKXy00KLU+84(wD(Y4_F+pDXaeHunE&IsJC z->((e{4=<`@E^r%U0*<*>}6d2oP)F}$6w6X1*hH1Z{p{@oDks`85}7cFsluX%EiN)4(XnOVf$kK(2z|r)jlata6H4crng4pJadfJotN$5 z@y|u!{7Gqafx|}A*$u#tLOI2F?}(zdwMpbMZf0||e_H14qMXXjB+fzX$B)C(k6}hz zqeqX$EfYP*#s2`e$H)DCC}RHrgvl+^C(A5WYli2&W@=xxf5pEQNU~}_5HI5k9?&c? 
z5CgyGUQIQE$T&`E);cL9Z)5q^v~biEuVG6Oh?3SvM?Sr4_K9x%nVw0GW|-Wa@*L;p zAoEImU4LyEI%chRY)*6JUA$+nv8pyYRkUQNk0TvTI`2lbnM{g=9C3qPl7${!k zzU6Cw9(bPG?QbISRo9$^?UU^%c+XyX)lZ4v6nuT-T@Kep@n4K@bvtWG8cS%d=Q4Rj z^&p;=4EjVZAsbG6kSi0$HqU2b2+j%T+PS@*JKn-|sBa)L)YhJl2vO!YQh4IA z>?Ge9BdF_I`gGt$j^NQ!+{31{6Imr6cw@~+rrd~Idw1{YNvuwe>ZkctOP6@G>0Azp z*V3|;&|cD4J=0J9sJosbA}tU&OsPCuSocL`(gOw z!!piwj~7^8NpM8#sI9I{jgC}<_YHi?ZtW&m?yckn88h4LwzJisw28L7vnANyDF^Q3_<64(zqm=SUAA$Y4m~NT@yGKBaD73=c70EuQT=Jb zSi3H@qiPXgrpS^6qWCpO$sc1n0Q*70{=E{xI4_4vXQR1KUSo5HCHXmfySz!-fR& zpK)D|jH0j1Zs7VNv>pVrDwiwb(e`94ZX=FdyoryxGsbrj`qfzcC*vJ8%$Jj*yzv*Z zW|wQond9kQE|uY57tUhRZTv3|ndP`yrM$T-3ZCp8hAXg#!QLg23>Mm5j-xuqyv}YC zMj&=WoMNNf@~bdZWcM)itxJ7`7FzX%rO|~xMR)*cv($zi{pln4JK+x*UH<@STUgAK zqWqv-VMQ!H4)HXa(f-@;?x71d5*tY^&K-NU@z4&{-F!d()$rD<_Ki2my|j*D9l}Q0 z03Hfw*0TD{={v1a)f_eSX!AM0W^Wm5o+3*R4caS08|OQVoOZ|@{x#cazp&4aE@b}B zisfUJuHz&YvNk{2&#iqs;a>tn<9%mOzW817YU{$r)k9qAmJk^wM&spaT;+l3#}%*p zP5fEVJ`d>j(Q4lnZr@kBa~jy_p&C~Je9tE1`@c_KX@6){pSu;Qs>|u~UF>|v;ZN9) z;zpIK#i;nB!FDY6^BDx!(jC|xK2>ZxXZUMcf7xrs9xv7!@57pvjj>|JYY_&`n|g%@ z`BzEc-`dB)xAR)~bH%A%0-k9Y3S}?}qsJM&_^!BVLkHpcHq^xaE35AlhQQ-F=;y!`v?4YcQ^BB8gwxQ0Tx@5q@P;) z-&WB)d9KK|x=TW_+pDT9vOBO}N6!uQuL$^$ru<=sYmF07vAKpLhqSwkSD2%Kc?-`U z?wYt}c;3Q!ei8IM6UE=M*X>tfc{ZMW2L+~{Wc}U5cc{$jJC1wSF17F{;>U-g^EE#K z>$5^YS!cAE7~}UVoa5YAtN#GPmhr~1;hjTSz41-jn@8TZ^WI9vKz{bzn&aj0)#9g> zKCOO!TmrZ93=dAV+fOmB-lsgW%ssu-d1j$+ulRE4M)o&$vW4JWNWq8aR_y*D_@t`Z z-CZ`!bzmDc)%;n~uOslroF7nqhaC;RCZa(45!N~G65dGmFd&2 z=Nq0ydPbA&`%~9D`$i9<@nIdl;#-Lk_n++NJ!qPa*(B`%m5cD`!C#_mJ*_ zP8{SM({wE@bju5kHu}Yc(y-Z;?OGNPt)WQHm`1^dAY=5b?Ktjq!u>@$9PA6nAtxOP zq@K!AVDY*YDj1SiwkuW}+ju2Mofs+TG0itw@fMdZo?(O?!=_O4OPse2Zg~c=a$>!e zRPY?~MOL!DK6c)@KD3@Hn8m8h8a#Z*IO81FUA5DFpcT)xFNo!n<oWp4u1c>~YZO*Q%mALW_yJS69+f5X z$7(#cJQ2b4s4bo;aDeqV1RCe%E2!TV+q~L3%G{iAYnp=2CB_q)?|eR&tyL^mP5hJ372Rr(n_C5*)j=VCV~XNF4#RPEqs=m=e5U03 zS7O(4HMxa`(ax#qaNUFU1HO~*U1MRZ;SN~=_z+nSEIBzpl|JZs?uA&8e)VZeR+h$UH$*KU0ByFBlRT0tmaV3> 
z;0%^TE;uYPir=u=7+ClNJml5QLK`))l2Npfwb0{rayc42KgAkx8qBexoJMx^r)kms zkOpot!nSpIQs(w~%KXMdnxSkIGh~h@O`mZ*9{bnu~=|= zR+aU_%cCminA))udq^-&N$W}}HE>&$|}Xhm|Di1pJ;8v2dE>xCYB?N%B;JHiMHp~t?P>GBJ+c{2CCSs zc%6vkTbkh0nCNQhl(lPk8r=P=+^;=Pdf+u(8R5QnjWZc+fnK4mT1j=GM<_zX4zSFIF-# zH+8IMXrdz>NpEbksd=dh%PNK}c;dQU1_^A{u3w$2!2+nCpfX+YEu@vBX40-e`Lt5x-rMSYdxfa^BtiXs9G!3Les^J zo(Mgw(n;FqGfbf+oRKbelk0(1S+@r|>!*RgPZ5ypd ztcp=zUc#V>_#H;dNwv9v-7C6-*Y|a6SJoN{0-Ytc591tWv;P3$IaF<1wbtkDNwA#?Vi6WE1115@;?-a+(x@J2e98 zERchRBd-;dWcDegxxwm)M~E6ga;7!;Ph*~Iyzm4-$(8CNBzysk^xRc4I-*{5)1G*Veke&b;#XX#0s>sMO z)Ku2DQN&wx?azKIrZINh=czAv{Rp+uz6AJ-OtrU{QnQNbSf|=9bm0+?J=X(_RF|F; z@DG6?wYW`Y;`-9j3ZgZONX!Y&;{92Dt5-w#OXFLO4tVrU15MN~E$$d;_9o$DX`>_N z!*`G#gSaBG?mTm>d^hnef_Um}JXuQwi+QAKdTfd19)PTefXc_e1XsKM&)N6kr^WvO5l4GB#cTZ|OjuX`CAxm!B_!*-=LF}yXM8UGo4y@*8(#j<@ZZFp zPf@?Njibh+U|P|KW*(#7yDuDg!{D4+Y&wp=Xtw9<+n-Uf)4bJo2ZQot55~Du`$TwO zQ8H;h8S#YHppBN+@@as1k@wi%r_k3|x2-5y7|xYGd)W044naSPBDwJN8d~btx^#t> zT_<7X{b4#UR!UX<~iy{c&}dYPm2B*=^i4v_-j9b?))cx713|(eP@entb>CK zeZ!yjPkOhe{9*V>s%Y{~d@!0-+*e;@xwX|JF$m>iLKY{2kB%IA(HvZ%@PneIx#G58 z@JcU&7k5$Ic!S5DCtcAit3e?0LG<}~HS{loJ{0S}4*VBs;4g06-d_0F$_+#TqBfqd_yO6*bOtFOI6!iq>n)$EcU+uN={_Xsq5qv?^8sbAbB!Ub_ zx-*qwk6wg&R8y%{L!B$5NyRjFelGZR{{RIO@L9L;uh^j0M0N_}Q?^HO0~68z0KP^u z$gC6lMt;yg56n<}Rq!3=it@LTx~vSMA(4XMFg~N@I3D%t-wuChuN8b_*Ryl0OCi8^!s14C>I)K_$hXytEf|qBd@JAvy3({A8huYixK<6LYPxJ` zZlS?eX#1;>dHMrO;~(31!}?Z=G+%@t2&^7(IEfBlx5HLwra0i>+E}TD`y* z7OCaOEMsob+=1&OLdd{{U+0ky!cX zY*A%nKf-fdr2ZAZVu2VK$@z$3ky|UPYctCpvwNn>2}ME~LEeM7$6^g?qv(3zFzIto z6unv*;=#wUuU>>b%QJ%$B{}GFce-tiHos;|SyDvkLk+F(Owy(rW2?*%pkcUfQP~;>CckbNr2#fHGyNODTsO6S&X9m&J+68tpXiJ=}5R%jy>5 zk<)J{j&%s=d0ba(@G9mxG>NSN<#B=dR~_+M8-e1Byh=zo&$+G|u+-8j#|7d&Sox(v z&os>^QY@nHacH$MAKHwaeuUVdFULYk~V5Mt{GadeV|ZZ{0m}_H>5c=SRA;lN+Qt3*3R$ zxBNk+NvHX&k&pm5uQKo-i7vb?CA_w7!K0Hd7r4mC{HxqGRkqjsHZ{97-4)Hg_4OGQ zi-El<*)zs9=#9j#?hbv$XWHq8TZ92V_oa0@j)KcEMVxD6%w31*9XN*-P)6VVj4o6Dw{8QnS`wVQL0I%3%HP6SVt+KERH+RoUQrHwr 
z_S#;dcWVsG4(!Ic$@Z(z_;&SBNKlVair?^^fV$Cb?jsrb2H<-d+!hg-erU!AwRF)@ zlt=8_S{yW%au1S3=u`NQMo$#%leVdlSgc9{0m? zAtmB0M@pzPi?!h`*bhTp)yxiZ6+rr$Wu}snu=6ItC-_Bdp=3i%NaJm^tMMibEIl}@ zH(Ca*cc;GFcEa_~72aF;dfl+!YX1PoYD=9IM-ef}xO!r(t1AMD^RBC5cWZw%2;_D5sybD#n3qfjNVa48Rn2S8xxIO$=W`sK^*O7a2h&pe?&boS-E80wUOHD2 zUh$%`OwQ6|H#%rpn|Bf!gKPryz!U^~sbZmOu}F)$23c_)kofwU>wFkbetY%-H;^6U92TQ6Ux$oqbUim%&{i!j)gox*+{+ptnyys&Rm>*_k2LVshoW3qIlq=@ z+azy9&0^i@epJq}DJre`*KOdNE8h=KCB*h*sXT`0Mk+mTO^L52lFrO5Oxu-xD;+E@ zhY1~}T<}>9={!_R2LkHo^*Z}UVrJEe@gU!5d1H)gg4tQq>5#cmOvlhgZXn}}A=E4+^HYVuW?(X`ryWMpI@h0kJov8qe}R(Lt+OBBP@2J6334(ld(RJDW zB>1|=MP!}{46<6zWwUoZF%$C(0x^M}yi_;W{{XW0#E3N8Z;KuwveC88LK0AD8je;% zya_~!fsbb9u6Sp{p9208-rQ=|f3z=%;}gCkwYZc|1gq3G*2p8=b6pazCaoPC@-x$^ z?Ee4?ymRpTP|#)2o5RR7y*ATuy@WJCIXt9ncx)UJJ?pi%vG{L%-|&-oXlYkFpa$V= zqfpUH{{TH04nt#t2(Om4PueHopN(~iJ{;@bBG#;Q@P1uIVpz@5?!i~?f!V8>pW<)8 zF9Z0)PVpy({C(oIxY8Uy+bpq*)6eebQ^cbK86vqHDpb9n$h9cjIf^Tt$~r6cMm@R3d0&UE{0Z?3R@L>73Fw-gobPP*y1#_HM{Lc2IVzim z9YN(vkVk6rSwC(44`0>otw+O|V$fDUIab?Hb(tnTN6LQbKf_Y!KegY2uDp0|tgrQJ zNR;l9?&Xf$j)~u);FH#rY1K)OQ@Ux-7^_EUOVEZbgE6< zBI12RY2$x}KM@r^ELdp}$EDp_pS9i)7THNV>8wg=@|zT>jvvu#p-wZLi@ z0@q$|?A=T3_x4vCC(-UCD{zjdrU>T-yG>%=Ux+>$o5da`({(Fp><7tpb8WIOn~;FE zIBsx7dbn@g*Hekd{{Usr8tZz}XcB2EnuF&8;0S{t=Q$_SoY&DTT?wrZ zGwStL=aOB=X6ysF4cHdNRu}SlZkBg@U1T8QLCEy3+u}FE{U^iPlUZ6%6#oEcZMuxM z_aa{|vq@vXkrW zT@S;(M%P%<))57;yUrP*h$jC4d{=!su$GO3p)_-snkt#CtsuXYOE|;ZMx}@1Yg0<_ z&aI|S-)_H8BX7v8-94*MSGku@yk@b2-5MTVOM$cwewnVfz`9l6i=sd9l22m=s=DQ& zwA}1ZZ2S7wv#CxS5UEN|);PIsr{-@$78Gxvz&itBW*3HY_*CSyQO+wq43jD&^4+2A+EIX9X&*nT7>=Z89Dh#8*%#9Ts{r(?y++$nl0>*Mgth- z)S~i?nCcV^?lmrS?{xPzSZU4%HkWGtbaJTV9zpM4PWTJqM~UG0L2r2_q7++SoD=GL z^{*W8SA#Um8!KNHYmnScV6w3vXN9CP5!(yRdOCj95$pD_=sHJ-u6&7QSkum#qAHo^ zaAg?(0P9v$tIMi7DA1C2XFaKC0yxMqW0Qkfx_^gdlIcFqtk1iJ^{$Ij@dd|;yiIka zd^+$>sja1~W#vn9vPrYNm|x)?{VOL`@r3>d*4tmQ(;>dJ)15xjoAA;|FvcWarMnS| z&z(sXEH#d|#vTjOtUedpYd04z_w!tYTXNs(#(5{&ywdkeWsriqdE^70YR`zjYKHi! 
zcx|-bg_nAId5tPV8aP*G>~c83sm;aA_qI0jY4(swb8J{aHRmq%b;o(O&NuE?TI%w-sc_bw~xX;8`L$czX|AG64hqBwFI9rhFM4PZ3B>N z$2=S2PXYL5(@^-E;;X+C!fmW1O*{S(JLOAZCI}57`^-aj=tXn>8~vyLDEQ*)dw++2 z6YpiblG$zC-7=(+r$R)S1RtewRK!-NFLcrnj8><%T6kK<=W->HjH?ja_+UZDLq+ey zdCj@J7BhKE&zKtv`Sh;{)qWs+chm1}&cEY}t+L$tSDKBbr^y)~DOHh&-^cMVNeJ=Y|u#?F$+|HKL$q8M@bH)3%7{K5jdRJUAc$K)SMixCL<3#Y} zdW^bep{-fkO9KHfoi=g*0DRN6Uk~^nO|^Ngej(U2t2l{FXw0g|p>IL;uLIWpANcD{ z@dQ?SF0Pu3&ml2_BdR+fAq^>7TOJmgU{7s?2 z(i?*@Slern3C3}pX1QMz=spVZ{eq>X#FkAw*@ohH!ClxR2a$klmehQ0;;#W~&!tPP z>RR@X=g0k^>o?jZi5WpF8y}b4AHde)JbbzfSS()tP%K=1l`y(^)P;FACeE z`3^@bk4|dkh^x(V6&_@AemMAeo&%6+S31;p6FfncVjOdhbKF-a{g%=G`OttmlU*g2 znc+*VI@85NUwvt<^+;A-E;yx!?N;FY?dFk;k;Vrj)~@)|SkXLFa@X1&lIgCqvYi4s zTIM)o##AGa4to$mHA@j0bg_)^(nnwWI(SCo#6B{zg!qcq+DkYZbdBa#S75mj76%AV zOrG`Zx3F5>+I^j^yn}HIGYQ}4bu0mqM{(EYub+MvN#idM+3MFm7x2Bs(oJIXUBhc+ z(lVeN%2aXy$jJkW=>7<4-w=K+c-DO?4-je>(b;%r=JHuA=2I|8S1O>f-~xFaYAQT_4EzBlW)aJHkT>-Q1J&$>jC1S!|}G6`DeQfc18 zof6Sg_*&Y;Iwi{Ff7aIis3Zx7hB%R#i%cUc?dCreJhy0@JEE6`MgQvFB$6Q{y@oUQs?I0zr;|S0iR0f z_MF<7%9_56u+S{l#TafPtLc%wwc%GntGj|tb-HK3{{R$d8q78xBJmcVB3{Ik%X00$ah%j zR`KNJvl-*?uOqzup?)0rQ^bI-ZrIXa;C4mRqYfftw4D;Vsq4>^bdPtil_lL&8NIou7p?ojTc~TV&sZ`^}kQ zpP;8%Y1gn~*3vhTcYWb(LH#Rd!k@LzjJ!#$U0W~0%Lw$yA~!boR(6QTZr|hG(EDbs z=^wUcw|}Iy?yce5TO<-lr6#sil~qPp0Rabb+S3r4fJw=V_cE0A0Jr-6f5`e*HZ z;~90-vbFFMTw3nLlUn87J@5xwQpdY&V(_p?;yiuu>hHt;9(%~VIXWf)4;k&u41?%L<=9i!_&cw@(UcA*S6O}ooWP+17|$*j9y9e&F; z^2nOzx2VA?0ussM5wo^98Oa=7;x={hPJV4_bJi`#{{i^Qev+T}5TsSl}tfIrRhz>vaDB z*~j4am8!>e_9<>v-)dT{frB?wk~zTir#w~}tHtV6Ff&qDN65P0?7!muPX6iiZw}0C zIP!HF6KXp1gN||Efn3{u$sRTEpN6k|ed4PD4Vq2mr1Bl0G3ZCFeuG(Ah^{8mBJnE0 zACwKAndK@ZcV*s>2b#3y@j`uk(mmdK0|!poox(KaWHrn30)p#pm8$dh{pUidnyA?-p4Wn=wFy;|x33uxb7a@lS$u`>j^T!&1dy*1}8M zy+6w_dIeMr3%*ZY)Wy&~B+aGWYc?@Fn2RFD>e#j8u_G)&kSZwS0;-leaM2wx!lu3B z=Kla1Tu5wx;U;Y^#_BzCHGMiwLT&DcJ4plo+Q#ddtnstrNB}Jz2wNGO$+}34C{to zhQ4p{FN*wOWvnomDOOW}KF#X^=<5BK6`-N&VI)0`x|)4rseF!b*Z z=+<@@a`>0Ro?fSKx=3wfB*!Bj2e==Ca>Ca^wY~A)i#o}99&}W&y<;1{c)}By 
z*m@de-|a=N>Q~FB_}fF%Zp*iyZ6&Uj6YSX^b~nyQ$j%$3YTy3QR@&ExB-Jjw2Wxez zPN5f6x@6xP1AxjG_>ZnLTPU{nB~D25ZA~;=dzm#oFHF7{mJyh*EpL$SXi5J7mRaDD z>FHjv@N4#Y*Z%-$JwD@F_%bw^?PFD0G;K!iDDE#OP*3hxjGSL_=QR%+TWM`2@4@W?ZwdsRf(vW8M5Tei+{Ey6 zlk}?OSGkn#-R^pi?C} z@N(zjEKphalf@bfUEWJ9g`GBGW8s;U^2giOypQ%@_=TrGjqJQJ@r%Y2cxuJ(o$fqI zquQ&>3iT}@KX~r$J?VZg>mC-f_?L4gr}3?PV{WpSgH{TEc%=Nf2kx9>AaPjAjOtYE zRA$lbUk1D#@V@Q~e;D}7ThneWqnun>YPLE~_{!w`%HR^X>9?+Hr1<-QeN!_7s$ z7HSuf>Ne7bvLEn`!75A-1UY;d`Sd5(iupHM_@kj{+GXebBUjhfZ!&`g#glAO+56sD z;9&Eg#;91|%yhx1>OLeFHw;y#Tbt~8j(Qbb@;!0hrZ{&QYFez;$E;ue(ii$CfRBJR z9Ue;wjD?`Sw7+Fv_l#s?+k=XCf`4e8A^aVC;2mzl*HoI~;Uu=#=9*>HA#idL9T;XX ze}vbV=-PGHh>{h&ir!o4Br(CKT<0t3GJ1pSRKE8Ph+Y0&sw_Ez9HCMOzKNUWwu~&21l15>IOSk0i<~P z3pu9Kd^x1)`lPo$VU`;k)p+FFl6=B`<~YZ_a@udi`=1QyUuA~m+lRmNZmoJWA2%#V zMjzIlIC8xU_NnfVMb-Wfcz4F$ESpl7N!G2d=5ZzCLl>27;BrC8#!u^4bU)d%z<2t6 z#6!auamg&q&3H&VusB8c&1c_u_v2rIJP3X^=}_Oz;k&8CV(V&M+e%d6nIZ&~J+pz$ zdETk}Q+!00;k6sf+s#f1pEnU&NU_BkJ;LB%iqi6nv_ZN}9=W0Z%wGt+Lbq0TaO##< zR>UZR_3*pLC5s;4O5N9f2>c$8N{{<;_QegHdEGR@Kp)-*s~Yfacf|8}x5F*tJrl`ERv?0Qr-9}@lpX*$B_nrDMP-**(nBXzbB zDI=WvpTmmz-^5-e@Ex^+U+UV<ARq2M!PJ3;lV+N{kzttGlXlJQ@~p9Gt>xWD*urGIC6f*nD%37CwN*DJ`!q3>KS zyYYWlJ}@`G3+}EoTSaA1El^0Sx1eC$5Z$ZIb&rex01A9pcWd^27h1J8>9*==qAwKE z?(4gwFK|u{IIe?C&>G{$nsv9rj}}o*ow@R;5fL&%W``3KkYu4BgXUTGdFxJ&4~g^Y=9 z+T%^~Jg-5K!Q;59UeJzYYU}?17(6GV>z6(PwbS&CPDh8#oIzxpqwoH+I8*2<*MecW z)bv@kGaQ1}S)FpiJhOlZ9lCTi&FK0?h1|CHnwE>EG;v1K{jS+z-*QRGjzXO0(AAv- z<9GZbwpSC&@csqUl^Q6aytHZA*-65o&jj?{>08H{$8t1lV`BSCk5RO1NUz42?IM=A zOJONs4sq-&6T}O2kx%Aof=fF^4Z0;NC6|B)agOx{rSUrR#{^zn=xe6Q95@hZoWxg* z6Y6^t>sT`QmitcAr_%1Fd|bGAC1Jwjn&)mw>SykzcHa(l3%?Fq>KC3J@nz!3QjNLzt}^4qC3H(iT@VRw z1`;vG`35itvFnDFl*Au>o_P-G-`>GPjzT|45x#C5-bB#Qrbw6_wtduTO8J zYcV8yW!5gPikq{HY;|6xM>(z%?&kjh#8>lppIi7tra=tz2--UcnIYT&1-9osvrACkG1p_aE1ybStS-*;!ygfKEjvkoXp?C6n!Jr5lKR#M zJEJEh0)h`rkH)oiJN+K(TYXRBKZ3L*(ArTUXdYN*ljY+roR-f+TwagiPZVqS$>N<~ zQZM#V<^c?-9zZ&%Q~>*8x;+O$@V|(3ZF1gwLJYsZg=C$lduIOZIXi*RV@}MqCd+;m 
z@h8A-a~2*S(!a3hfo4bDsIh0s%1S)U0^4w;XCIAX_`c59RMzgTt*)9~GDu^JIGR-R z<&+@Y>4wX%u=F*ZuY5IY9Xi%Ibou9&=3GV=9F!a$bI3U7iq5k5Q?5sMbgyr1e`x?R zMJ>c-8#y0!fdn_bCjm*FUx$1P;g1aXOHI^Y#}*zYg8s&N^$Q&~?V)=zj1^*dj|X?= zI+}|?_>=oQ+1Y9~{{Zlz?)1H8J5aiQp=qkfvBx*}Swf#RJx>_N707tg;dhAqAv}7O z%>Mw{8hyNCTi~b|$Pavf8~PDZTl`?~#pHix@CSJK@(o)+68x(US#$sKE}`2PU#U*iXYyb)pJ?Gi5xUU*Vx5m{=Q6uxvDcJ#PHav!f9sx8Xwhd8!D zpm^(5@XfBV@XNy9E}eAm5-d|BnyfqRWR(V1WpxCr9+?O0RW-l(O0I0%SGe&OlYMa# z`BT~2UP$FO^&k=RAA09L5d2Z`zr=}V)3jY9PqMeRl_9sXwH96jQMh77$YYvI@LutD>F4s^4#M51a%J@_{BU;dvN~% z*midFFZOM{$CJ8c`FA>>nnF(-c=Z)SU+|ZTbSd>qOYI9yu+(mmTH8++Uo3I4=XuF4 zcK2RIYv`UJ(V?@lwy}mQ%T_OQ=SO7gG7^~b0RZHK&&^ss9`RI`H%+B@e?--8#q2>% z;f*JISqD6U^Dakm^sOB^5g&@iPs4lv02+8N!uR@quGXw2R`RCPw8J7@gVHzM$l5(B z=AH20;?AW$hp)x)Qr;Av&8}gxfgVJP*ghH-KtFa*qgQO-7km@px#4|I*)Au;2bx$F zUfhAqjT>Wk`@QP+q5DdF6V`2Qd^~MC-u!J{h>T9q1{~S9wr(_rrL}?OJ3@FdIz$Y zpPARo#@5FfAo1F^b&rJK0QCO=uV{t^X{+_ZB!2JA|HW0=Un=bFIy1*U3R z--o5VxA9h;W;Hm>cQ<#J5xKXFjAWo01p27WS=N7Jzl;7M(+$^$yg{Yv!UrZOto2VK za)jVBtAOPC8o#U0Ynj4wwutC0H2(kwL#59yr{a5ApnRc;V2}4_u*{4|80u@2ywHDV zpAO8LRg~9@dv3+9Z?uh4CxzX)0f;0F58(&!t1Y5jcoNNgG4Y?^uAzRrGJUqN`0Q@ns-5?X)}?9dhfBo3R{ML#6mH#Qy*ZqyE@m3|-sZ!!otJ zFK_m%Td4b!A_Ak1K)|IcT)UG>O`T*OG=BwZ{{YzY#c?IA)NOHn1QyEA=KZonOPhu^Oq=Z2VEK+}=fiRvTz0osNAv;Ck0Poiz2i+Zs+Zd0bxZj4&1=D46N*U6#Rc8%&<-CwWR7|C zuU64~HTxw>Ep_4W*Tnj?N%P4jrK=cYg@->hQLrcID;LKeFa3_RzYz(1Mey4CU>^|b zFRVMj!yPiIxwGhb6%KH3LX{V~W?#jR*`LIER+A5iH47~w%Em7ay#x7?3UjcKVmh}rtqAf1+_TRTp2E} zJVkRn6NAXu7j8$frpYJlOLrt%>>m%T;g;Sp>84VNR z&HGs1_@d7G>R1wJ;nL%C2bCu!S9f*kisODE{9gDWr)e=<_&ZLX>~13d3;Qsy9Ds6g zI*Rr^bHJanU&M>lytuwyEXE4L;`O||A3zRkFHrak`yY56Ep;6(%ftGzw4dqI*y?u+ za5L1hxyRxvp#>j^%A_QgjQLx|f3&RUNVvA}WO7+Wa-~u@(W4n1c&-ax*M2Q(z8g(K z%g58{@OhzPwX%)8t;eqR?$!2p#!m}=#-1IEPu9FW;jI$#+UE#lgI|7^E7GG=4F#(F4hi9_)!Ikr$HQ-n zKNI{9V%l!Kd#URd(o7r4^M=9efO#4971a1k;(x|Fi;K%I1^5G4wu4Zg_jfiOoqIf! z|Nq8GB?&p@Y=u&UoX^(DDQTg|aUn4p$!Qo%&gW1RwdH(1ABLRgEay|^xXmHw&2bo? 
z-~Ijl?=g1YxBLBmU$5(WUS-ezfQEn&{@#2G^t=}9nrx5Anp(i@hEh-|+>^?k`xNk$ z+{f#VG}^bJ=+A`u3GA=&bEUcF@{ZjNFCPw|A{I?uA4l^NLg!Nq4x-7}@HY(*B_}T_Etg1o zkdJf%0kvIP9Qd8;aR4Hp;;LuEqp})U|MVJc*<< zL)!Y2=k+HQy1fL_UiyM79pYxcEM5&I2`0?n%8|1fSDl-U%KP$lIG6$=d1o$^hWqIP zTFt!&xvz*!qH3OrRx{mfeSGI%C$H)4m4Z{x_U}|S^vCCBA(pgS9yhMwi>e%r?DJXwR z+c)5y7@D#9i($hvVFy2MK9*upiuVeI%9!b1IT z6F0?XD|y*Y!rwTTi4dn=iRTvb1Iw5X$zO?X5o%4PFc;z=omB_?!^h}9TXFaN9XxEp z3F=U`85aXWtU-HZHoiye^_bhD+f8e;5HXfsw)`zv{r`9`Y)H)tYmVto*6xu=LT&Zu zDssLi_tnpR$CK{qXMOP{ROge9CQ#*gbzh$C?w1*>fHYgY{@Xd(QS{e|eE*y;OX_HM zg}HOl(5A`>q)Cykf^i-eJ!e?<2lB8eGhutx;_#SzxgP66IE6q)lSf!4!&Z#g+&?fK zy}6HA)2jN9jvfYO(2`y?)8d-YQZ{$8xiPYMwI;3b*5bFMKCvj3y+Xf~R%o~uf2xo` zCr}^kRE+n=zQtnx__$v3lD+jX-i&TSF6U@Z*`xuYGGVr{o4qjZWnlP1t;fZKrT*J1 zhJhuiNpfpm6=i|Nb&Z{9pdR~TQ1!&q!Y&}yldnfzm!V14CYGlyYf=X1tmd7Y|F7pO zcAQk9Phde`rd#-g0FPutfGE$O%vMO zCD)U4+kRU=v1H+SIevXqyvEk$V$n-ix~}VQ7O=U+ugz(4RK=#-wiNM5S68m4UDcPj zPoEc{3)qaSH|9jCH^_&&&Ed~e%tpPYSTcI9p@(`9JskT97IG?sR=dE$C3Q9$C`5%7 z;GM7-Mw+6(qhcA>-*+$n*Xy*(FUTd(jH&RU(&VE9w>qU*l9c^p@?GE+QHqqj(0*=y zv)(Zxw?0<-*H@z%}bSd^FlF@+Y6E=@eX?gBs!%DH3?a0D! 
z*IaeJ8Zjl?8k@CtNn>}_dUk%3i5TF&kagS@Nqdh?@v|PbWcJ`lD`M5%=2>2CvF(5C z>maEBXEaGM;nW_3Yw`aReO^lqOf}6e(n~uV;_0dV^pswjzQvX1Q>qCRhA>0VQAf>3 zy}%KX9uki$%<&daMsXeuu8Z;C&ee8nvbtoCZF@l@=Qu`}$KYERLTl&Yx$2C`jNW(6 zRTdm7gA>p$mgY6U8B~E#VgN5_s=4uMOT6wehuZCy*AIh|V*Gx*G`wB)bc^WA(*}-} zMF{)%X+qjl=f1DBy?AipOWWLwo(uJ#Ww-9R)QxH|P#lTaV9#xSgnXOX{9g;d{)=@5 zk9zxW8HnY`?*3|BObE725xzzR6FlR#JDT~5H$pel^^yl<#fHNKyCi;iX>d{}9m&vx zaHm^3V;dORh&g_gnv zjO#|`h}@Q6G^9F-mPH+AeSPix!8tl|KbMn`-RdgrcdzNcd*PXn3pTZVd1*$?=S~Oql zzc`I4dmN+3chlT5pO;pcO)ib0!N@*c9*Fst=#qaqN6GXz8$Tyb=rdA2;TAQ#HOyiI z`~RcE+XzvyS-`1KZA>981oNjJfWQ=ZiK;N9pZA+AI~#@y-Ha!kF6WY0+AC&pifg)o zHp?o#pZvt`zY*M5jG0~ZG=XJ)K;aFD%I$TfiYl&)(MC?_i4%)Yohr+}BR-W1hEqFL zR#F$e+^((q9sG33H1I+6y+$U$z3zQWh{NRxw@=0+Oo%B_ciC4)s*dzh?*nahZYPgG z57up#79b4B$781R1`~mo)A+pja2@#)l~zD1m|H#GYOf$V8*UIg*U1>Y=f;hI5c&@z zJ!dsH2tn4x=L_;zx)HoW?)|=OK}bi^Qlp_TdDaCCr+17^@|pVfqvq0@w!hJFyk7vgXCtNUewUA=>AgaW z;Sc5WL|z>iy3cc$Q!GuP;mXa^_*;~;!Kb}A*WvsFrUPwW~~9#Vq~s2=**%HidxaneV_1JB=c$hHq+-5^na{vAm?w}Ni& zgWl(3LzKa`K~AcH9=2|@8m_~8>x%?LWYcNsUrSI=cV2@y0US4xq#V6>zWPlKO2puM z1w)FBfnzyiv7bzAGwgSBls8@`Ht-&Tl}}osPDI8dD{xCyMEeq7x(&a_c-C5g1V{j= zY@wqlWIiG%mlpr~jC(B_GyShMqux%%F-gT_S|u;k8g8%4WaSg$S@^pUw8<9HB*fX6 zTH`oMyI&C6jMq^;?-w`z;qXOr-?K3hEZMpZKPYhN4Sz)KVtn&Xz=M+b12^)8$xSA5 z+({c=eDgx?E9X?NnS|odffVP8(n5W)ANA;9ZlBKib4{vLw>Bm4Wtu$2q~+@O{Q$Cy z$c`A=O4^)tw56MIH?JTIoh_Eojj(SyelS{`p44GB0uRcK>Rr;H9?Hy}%*1EA{IlVr zY7zrpU0#lAnE1e1b6t}yB8H?Vz*X=g19@3qmaabyyQ%)2J;_DY zno;XIRB;04l2W;H3{CQl7Ou(i?Ku697<=a~tU!>82HE9L6T=#csX>>-%*Kc`CF}f0 z7j}5)p`GunWRWhzwUOJKsxs+x)3OBUkZ*X@1$4(XU8jjx)wo4tF7N-_l%|}fwt1y; z{BsW$N$L?inVA3Bq{zaaywZNgYw1MriW<53-jrL@T=eXYWuI$L$EB7<^`j0W38Aa>2y>uaA_TprR z>2Q9OIRIJ|;to2A`L%=7z@)5NxyGM74eV`Vx15zB=C&e`j&P0ad1K0hyAnl?fax`EQ)sz80n^k@iOp<3XZ8v zZFMo_i^O-bak7AkfD_w;c-zxjXO7cDv$`fp?3NrVl=LoOm}c z#r_u+{wo&Tp=^=c(I>A_Q)K^XYndV@uOg=0`zwrh+Zf0*JCa>G=lH6krrUZP#rVH$ zJLbVA)USlLoQ}f5a;$76fz``^C1h}u^B1D&4zx`zxmau`l9*_%X>8r!V<$VW@kw>B 
z6tJggDImg@kbFfjkCK#vhZRjpcJy@|KazI3y7|_cJ{6dxytulY0C`UFipMcaf7wr{NZ%Me$s;{v{D;#Qe+{UZ7Hean; z-HVUoK}(O@H@j6$Teg=}%4pCDu&$quq8UeWURwC2JyiQkj_09CP@{>GeJ+gkBiZ+6 z)U05B8?t?#1-d;|=akt@egwbpgaUM;iK@XGL}|;!$8LfCxQSE3SKdA2A3r`TcHaE6Sdm4Bc%#`u#J+%=UV1QFQ-`9F36%g8!Y@A zUt6x)+@4Iw#;Ld8A{kfI+@}0k`1qbz(8~mBf;2phI}@Nmh5%(-7^y0neFrCUU9z{oEEC6)fua%i59*P!5WijpA=Vy?dw4z=vs}afv;u( zMzMY-MgZw{2i8h5KL)XdY zlOL0}-I8T)d^I&1pjSDWw@`KI6f(4id27uzQG0+zqGv=yH2-O(1<>piV|XoIY*PtB zt1yb~T94XYZ^oycF|ofX^_~D--FN8_$gZ;1l-jg`+~-YZpp@P{MT6hUupp6SNQ4+A z8GGw%A$|r^VKtkW_dd{gu3+X$Tv!u|I(wpp=pzh9$h;F9nGt|RRfJr}xUaE2_aA?r zZBvP@0)!(Q9WM&EtpI?cWTChnOq@n2&iY9~3D}n6CsShOJGoWkR|2g#{-YbzmXLGb zHb%;w0xTCtRJfn|Y;#C8-0lG0WL=bEjK`<1{~Q)vZsH!RR$o^VF}$_nj4JjD`I)J? zZuyX6nx-M5vNcrjktebCVEH$yay^e`h(;ZRT5j6!`4bLl*K`TWp&DL15gDgHL6EVK zb2BbA$7xpgqU-P3?lNp!quL=+2n$rnMQlNyG)Wg;QV>TYAhx%ST*lEEg=yA2CnebWl9(Kc>GUrknXJLaS z!6gJ9V;d|_HjP(fj4Zl9b9T7NY+vI~!BgmV2@hSnqDZPB zCB$CIx!tqN0>o{4YT()}GNCSP#JCymJD#56CbrEt-Q~L9yJEi1`_J&n<&4I`-&ONh zVW~BC9M|aHoO6GH^dXNDa@(c&t<^iv*hZ^etV52JCkn6A@(Bb>KY7UxP&v=XCShugp)LU(ip41>)FnM^Ho5DKeY`05+7Y%IJW}fqnMxxh ze?CljKqlf0lb50myd2&d2>!r)ukc%LN+P>;1{ky}`yBX{es%OXU-wG7=E8%20=37J zp8}_v833>ZNTAP_!hGOHO>raiO{=G*yc#9^C@#rom1Dkfo{_PhbbX?{R8{lYn$Ppf zdy7<_Oe-sGR1vk={7q$2e|1^qXKi|ilk`u&R5V)^O%D(Js1Sg&-PNTfF%;sL*uBVE zpzwbBj$r4<*eoB_MfCnx;NK`|tcejJ`g9!SgkoIdf12X`%W^&S-9<~)ExMDKUc`VJ zKk_%vTp=$qDrbk%EDpCw#q>&l0z znLJ6d)_KOSeZR|gyNhd<*@*funa7A)Npy*h_HDyUh5m3JIlf|_y0+IG?l%}tGlLVG zIjE*{mhv2xisKO>GxrxhgrcIkw|hDY@1UUWE5gAPwpT!s%AR(4is%-6&~&SIVr$$L z{tg-lk5rvX?k1&k&jRnZ3p!L3iH+ee^(|YlyGrWc=_0H#_xCrQOrP@@^8Wcv{#_qe3wud+xr+M>TkkfsMk+OyNx7O|81^uBoJ{}#Fk&~0RSMnqN&AhlrH0uy}kNL z&)av!V}CSdB;RlNC9LxN106Wj;`B$c78e53YOx$jZf&s4pk}mxFK*Bzx@4I8(qFNU z;SA~$jni@YNsT2yuUW1ox$v)qAx{p!0J7Wz=PcQ8g5MVD2tf-gL#C835|TMDT!&|A zDi{+gM(*D+w#^d5x2om|avFCadr{HHuhKs6Se`4Y1)89^<}V^l78K6zVhqUr=yHM( zwEOKyTKlBjrR|qNyYRoR&ntosA0cx`3TTWJObC*dyoO<0iPoo_tE`-xwyyq?9L^_k 
zYrs2+CcdP3sR$YetqPA1lymNC$M9%8xOp_1-Olk!_u47$42n3(B-~>rn?sXfvA?Y)2s=eO$X5dwDoaf-C0~`XW zsWZqKSwN-rz+Q1K!aK z7`-u9!<2+&L5GJ*D(}L&o;o)Abp?SPAXAyRyY;y^%*DBlPY)wY@y=TBBJ_F$cF{kG zO)TWeI2x3AQV4XtHxmNTnAZthHMOHxTtwn&UFGM$qZhdIEr>oh!RiSavQV++?h3R@gG9Ggs-tQmurK+5!!yRo%G27Rwx6$HlS3^nS6k8O6-$p&BabFnY zvn?dP>3U=0{EIsBK}@ISMtxK6V$Iy2U#FD4Qe7&XJV-?03_r}b2Fg%_f1w<-1@r~P zq~0f>*97?XMTOS7aNBXsEKPjWX(NhTd+9|SNlI;Jw7YVoK^HOYN%-0O`t=7M%~%h$ zl^7zWra%#JcOpiJBTP#ux&9|d_e!>tRd&a2lf+XOe>xa@yARF%ygg#3TkV3DB76E= ztA9>S$g#5Wbr3KaR|vif7qDkfI2+J1zDQWNfsh~}A=HSq1Ju>MfY%Uk&%4jxOoTrQ z1Ly!0BNV3ub&l>VP~>%Z+Uo90_vP^{nJb=LgyNr`pI-Mr10nn&zpFnl>npyv{un&P zJn}Og&LVode=URp1?F7W*5==iHaL6m9>S(1j9;~!XVZWYtd$S|xlf#I^A&|CyL}?nG{qsuIJdZ@ zzaF)4CXGmXJdT8MhsDE)1`59uV#gap5(Qp;L4S}B>cVmltUZyLRWOSBTkcDqHW=Hh zbqnM*fak4QSa-XmDNb`mbAXby&wupuh;uP%o9AzmKv;IYr{Y0TrhbMq>_;%p{^byQ z@6YrL?(1`v8Zu@{v+i=ucW&){b3#xn02VIFuOyJq-!Thi3$-i`@#Ktwu8Sbyf zr`@y!pZZ0W=VWF|u)}9|m<`ybJV=TiKQR$BgW?XM%w)9sbdY4f=#{{2r-dtfitAbm z$28fnp??;V$WNnOd-YlqZ?{edt*Mzw<|=<|Vp``p_6gsgzl2Df zGO}?-1rWx4IDWH8(NK3^kDOKG-fq2riQG8hsnw|U z2B{MlC`}-&7k9=54ljLY$|dWoX5lI|>cKg$IF2+q#R7Yc5Nt(Kixgo~dYs7FlewCD zo=qb!SXtKTt{ZQ>!74@j1za9+$?_Z z?&CH2`%*a{lb1wqH^ev=d(Lwpb&C=8BO~KT|Drjofr8b8mRnF6*C%NCNnV9)o54TB z(hhlOfz$6A>{(R)QDRtW`aiX?bZb|nifhTWy*Kf5?Ce*1?*(oden&Q~lb%U>~ z%BpSO-_+l;^?l-e3K5EgGinQ*gqP zpClu;hCKDY>n}k>YcpQneBfVP?$52scVthDk3Afv#y__u%i#yO8WKcNp&Y2ZEsiYhgcQ&PWK4hxm^2P2NcIv?JFF5!iKzmnyPgJn4d$LS>mu6 z>q=!8v6S;^y2;+|ehg>ZpZ8#OYzkqE!F z*dO3tWI5gE=*~m#J70G0nJ&4}N1qb5O_*#sDP==U9=QHS!})J89yEs)Wz=DPBRWZZ zw<*D0RzWWU0jX-itgSS{1%UWnLbyB!NTpd)ElSwoh`Z+Tv!?`s9skR#XJRx}I8UcH zp7h99x@hDVM(@+VHf+s$-^HhdW=yv2vxd@bC08<~d>8t$!Hq03-CiFb+oXt8A5%P8 zh&~A57=O%ZnFq(wGYyhPHOBs<xOa|OSutN+61J;+tuDRIsO$eOKc<98+`^E z!Q?q)S8aY1F`t?)#FJF1-^uRRhZUzp3?dqoOaH!3)H)W6)2NgG>m%ech;)aMWu8xL zZvnolI2Ojvbp{4|#mG4ITK%P=uD0C)zsvu;RMClYue><3QH+5jHVRZ87;*hSy12uu zE*3T44+`PJlbw`l$c1k!Ah~zGbe#`03%de25FjIRac3o4JBEuIyU%1YM^K9Wm&ioEvF0Rc<8l-%(r5D7CCo;N*HGDce$gQSoQ&75< 
z!|$ZS!kwCIavck%<$A>!mtW)J*dD6A?38z2_PWtKXiLSMJfNl#z_BF?%`7k*mPA9f zUdfOY2;Gc|hWzF?j!RqN+1`ND2J9Tb(4owe8Xnc~C>ILJ$5$B~oL=U_Kmr;xWG;V# zxV2&7@aDJi)2wIHt!EO};*h4W)>2QYw-bMYMeorsB)&uTe?s(|Q7unc=FbLzTaim~ z`=|i`FY)HzwiPxlaaU53UOoBoC3)(|bNxqvq*`&7HSfM-1kIA-wrK`DC45g5-B?}9 zo$#?0vEuEfq61L}H}H_V0cSsbNvQ)A=%zNszV+ZhZ6mW?&3Aa?7dFi+txGUV7{rU? zOg)h&@I>DP{PdZKu2;vf)ct8NL*U>U*XZK- zV)+kk#UCFubF$y3u=AaZ`8xlZ4`O5k*DanM2o(@!0{FKmjzBecH~N4p`++BjO=?5w|!DW8AXlQGqLkb>p7!YMq! z?7BJ0y>%v2M`fp!zNijv0|-jh(T!@NYzNZiKEZbfWKYKF!i?1@331h=z7qwS9Qi38 zPg9#nYqiKGK0BUBA{>#vXlu}^!Dku@QA{T2@p+x1H$|zCp_RP7@?oT88I+D{tpO$s z;`t3Zq1gJg9`besG3sTK_c{3c!1s{*YPEx_mrRmm+GY)1r4%_l=Q$tAhQZ=|7@s0bbmV+X z1IR)X2^k2xL7cDzWgXF&UK_h9!tZR5--1i14)%?r-^$e*oBp9#_hRW)`S{@@jp!f0Uf=UT%ckyYasrq4I|Zy)T~0~a*mlc+6k2N* zSeGR6#?5Jid~YtKFTB1oS%TD`XkaACFiv4Rz~PtiFl?;wie;?SmekaJEqAe{_^0fX zjmY1Wq)z-{1kEg~gRD+^d~X`2oMU9~uJ**0Qw4X00CANt75RPmdW!poekGWcvN3g{ zfmHpi#Z6v`rI}<|J5NSl;px=fDHasIWOs>g`>)>wV@Mz)5=<=v7B4oMEybsALczZx z5ppx6^zLw+^2&MoEtO&jH&UIN4-}%3 zqn$w5{+dk}Wwdn%zsmHka{{j}rDUFcudGF-Q$X@`4WCr<^Y}-lQFW0nc;mu}(yG=C zV2jY)6*B%tR#xWR!)qpUH_yk9nX02&ZwE5b-kYGv+eCmn^=?U~*ln-$=Eg@Ac-(hZ z`YnmUU=8(j>fuv2K}(C)n|+cj&kh7cU8OQ?0^fALtfM)veAkm5b3^Jd!1u9VWUGg3f*@R#o)Aw7Bc!m`l$6i;SXU%#K37je|tpZIu zwxtSf?(c`kh&b6Mu$M?F4$3Z)wl7@fI@Ptaym9sipRfHB58^Q_ z_%2iO!1Z3j?$XwEpBBZWKeBSefW9jV$tP@je(*T!Lf@(#+B89?9X7ShMguDR=?Z$C3dL-K1>O(f~ zRy=xvGLk{Yw7bPOfysuOXSr;j8kB9{_1!GNH%e%Uxra1R*Lfzn_qz-4AEtxfv7|(2p0O2qdca_Z4s`I0k$+P7#7bKIl#X@=n zJw`%CY>v4WN}U?ndK*!#{Bw=OQ_GGCi{+obdFxmy5&XaRzSqd_1i4ov2UUdBf!gpD z$_fz^t|i(EBS~E(o(X~bYwGF~`)8M-4I(MxSSP!{KyGr$4sEGqJ1^$`iakBehL0nel+S<1&zl-Akx6)R- z(+dvjT^(DlPtv+W1y-pGjMRQ&Gu@gN9eD<8Z#6-=*bV2noe>Kn3aBh5m|JCPU-9%0?CP^1Yv_h>=*XTL|AQU!&a(lDDs!s}kN=qXL^Y@E= z27mV=9=5a5AF?MawjodluGC7x)zrZ1g;FNx;oiDbbB(*PA}bZ;Cz+E%>v1_e%qDM! 
zv#b&ObvulkLjV7V6%w~uU=vMZqi{tQ3HVFt(kk2CX{+vV5%EWuyZ*(%6oj!!^K1;Q zLecz>E*x--a++0;-{$63O&&`}=JZPye^Oa|2)X{Y`SJXLKXv|Eop>aYX<`1HB}|E; zleFrW5@w$W%VJo*6F2vDV-8?1ssFcC7kBQuXG1mOG=yp!O02|zZ6JxOL`V+(E8 zQI#0+sXLM+Zy&gmf0p7Jp(I00@J5L!c_v#{y5Podec7Gf2;4H@^pnuIQw#|1)t%oC z&f79?@FC-pR6EnM-^xMNZcjfd)OKH zZ*~o`Tf*6CBXpW^i}H>GaXe}=R)W+Z3f%b{FLlGT+qAWhA#TsedTg~7XYW8p2j|To z?J3Ib%rdd{;X!-y7rv&3msLgE9bHysZCQ$MHZn!(Qbb$5g+WQ}b1Vr(XA{)%>viMG z8)A!JjOAN5|EXp~?XJl;?}Rg&i2**}-YNR?$g zS2|uLR4LCwv)uvR4&6u*B8^`k-#;-;-MISBc8Bqs%7y1wdB_=2drqCw=#H_ZvQ0*U z@+LQ`7#s`_6zxH|#*KimPdkxb;5(YS&m;^VwW!hRH*+Ejsp`g~jv>o%9j5kt&#Y4d z-$^q`oBPQR4nMw`^;=X%QatB$t8ESpo0w%AVmz30{zKj1oDjvjs}(b~(p9kNcg(bf2+hD&IqU7dFO=4@pN<$OVO3KS)k324&Z3NRLMcpp?cQSrtK6I!8y^Q3e`VhC- zALY;Oa-Prw%+C!YDt2KaMXUVRY|Ax)<$8IA zcZ_05-N0>2ztna0Yn)&7?7O2$!e9p`4j4@SM;GmC_+(C; zzDO-SctaibaUi~=#uqy!{$ZY7g}Nk=;vNmy;!b-8HHXBr5MC;n4h^-N~LdB2R~ z^*7SCSMyw37|Z7N@KG%&_;V~yYN{iTJ{bK)nQvzS3fEUB&6pIt{rK)h+O95@2^QYj zo@<2QFy+t<(1{*RvXU~{Dk*s?VVQS0crz@ENSq}rKTrcV>G%(8Bl}@$FL~~4ihCko zQcMo?MjGx=SWgIh!Z({Zt)WNW=Qip!>%?Q%X$En`#^}~&dPN8qO)jnVQ0q!n8HFb( z;}+_w)L*^QqF44;!nf~f9uI5#nLK$hKHvm6n9#~Zw@cJXONuy6^jPcrLAnc_{wo=l zK{{cet<*0*IPePc31vGB4g5Y1h>rtn-@lCe;?b|wzqYOjB6G93UYcY=eyX$j%XiUS zvS_*^4SMx9>4bi5Vf8YxCZ(niIzR-GC)*DkiXuoSwI^6pH}{Y3idH%`6AdpaJW4BK3+_dDRxe=FpKlN^Y%YFeGb@hOw7}R7GZYEVVLA_CdDu; z(;=7Y(hg#mai|Po$jmD@NB56@-6^AIC}*6AlGJ*ct{gbDai)AWG*P*#rj-CV(OCY@ z4$5jLo4h^ms(Yc6zD2xz)h6~t4a9_q#_`nqgf)S{;a<%CUTEg8P_2$MY~S?zJuah{ zy@_hSjL2r2289UAb_>8Th%_O$-klQoli>MzSs+Yk(KweuF`IvUTv$>s7 zw}ww?i!w=i)67%_3ZB2ZX+uVOj=OqB>LH}W8}3~ic=l39>(yyJ`%A6cXRY%Ux$SJ6 z#Wls0R9rIGWOX^cb`*1pzxkTj6je;E^9^fx(j#0c}{OIOdP zHuqq7$ID{lkz2C*GkcJ8cxo7~wGyd2MtguNGc1|gV!vTyr2oi1enMGl=SrMgxRQFi_+=vx6g z0o$5i0G&&SPW(*E&iYr0ALpQR=kL=b8aqiQhJP-b&)&2cX3G7Ep*O*k$C5lV&=!DV_On*m>lU4Cbn60k}6H%<%PD`ypBaOAXIA zBp(_L*;Wki9mVAb6vL!EQE z$cN-v{Jgfm#+*R&h}36GHSnJTpV(esQa(e}Q6)je!SwB9@FFQWLq&mrQkgLHzv_Di}LG~DL*6a_jVEnsVkX= zQtZTe_GUYCuWw#@&5q4edjAe5Q)A4$Q^RtJlKR9@`o4!lor@Q)f4<=Fv(b0Ljua$k 
z*Q61_QH|_=kdIaeU@P--1kpBVvI}+wHT< z8INt>D6aOuvST(^r6|RQ<4JYN`EhXUY1}gW$39+KMD+Z{{Vu=TrE-YWV%J;MDyJsx z&~r_^2*+n+aI}VHA&XKsC*1vxZeFQde(VAMjkT!e}tr<&(hmQlKW;YWzf2qnf6 z%{vvecT^qGkNqS~sW~D_gxj-KW;Jfi$moip*_btJ;;03g_O3Gbz;{r9#7sLGx0Lc} hhZh>a5R^+3#s?#9!K@fy9BHAGm9x(|4*GBE{{Yk|QyKsO literal 0 HcmV?d00001 diff --git a/tests/data/ak/AAOYRUDX/AAOYRUDX_f000028.jpg b/tests/data/ak/AAOYRUDX/AAOYRUDX_f000028.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a560fe797ac4ebba14c7f53d42feb3ee5d728063 GIT binary patch literal 81678 zcmbTdcQjmI*graYk05#tqDJpMh!#N*y(Kymy$uG@2}Te^PxM}*_il*Z8Fln-MjbUj z-}hbX-hb{N_uhThe%3kb?6c4FS`ETeE=p27U@d?d2BN6&p6C(F9d@Ve&ezz)O3^U zOd?o?EZsx!@F^&%sA*oYv2$>835$q|iAzW-zEx6IQB`}V`%zEdz|hFp%G$=(&fdY% z!_&*#2juG)8WtWA85R94G3iHgO6t$F^xVAsg2JNWlF~o5b@dI6P0cMmy?y-y(7~a< zQ`0lEbMp&}F!;vi*7nZs-u?md?EK>L>Kb)(`yVbe0LK5qdXoQ-xJaIGq5o$Cod0m4 zq5C{_3=&MNmjc+N^4d6`-N={)gK=LdB>b-F#$yrEL6BRzPvTRs3d3I^|AY2F$o}5} z3;F+r?EeDxf8&Ay2ra4P4@I?&_ouQJLa&3f z70~F`ov6o6uPS*mtwLOJsEbKV##o<)P8I4|E)MmB6ZK`f!kk%P4Pd!EN8{ry8quVp zaW6M7u6`F67i!;E?ylq?(bd@gY`phme0Qm}7zBm+^jJjAb-v&|s;8z^)a5A{$jn?C z>ng2r28grMpN~8KZA&*TF6BI@cw~DV?4*4p0y#)4B3Zfx_urVk#vX&8yH@tHcv>Yu zWoLJ)99TU_k=4C^mhFcd@yGF-d~}D6*T!B6XRP1eR!fHWzMm`IBxa+HNNa80&qOm< z|6Fv)AUY4P3r;jC8&a@&)Waz>4F1X;Nx}^sxuk$fNI%>9v$gK`kwNj3H3RVQjt|Ag z9^=KsJnG3u2U|W!O>uJnhWrP}l?+ZKhq6zLoOkv!hMG&FG%A8Vp-$yf2DU;bqXquF z9mB};WSY}Xs*kmyt&MsvAdbJ5EUIdx!sU3V6cVW0-CZP8`?DeZK%9JMm@z0T^ zP20zV7HcxCLn30d0NHNA@>0 z{$1-^Mrg@Kv`2QW)fFAhZ^~gU?FEtlZVx$q3ygkE{#(4A@1?bap`Ayulq=}Kk@@&{ zX;Hv90ivq=*kofoYNvDRLJ_NdbThB*`SDvzJo$LDuL9bG^gwoFl)7xphT#{!DsY)t z*sxWxY2j=$zFRUBaWXhw?;jK%&bx+?VH;nGd4OS+Z=$u_g5*apR45=4}JL)+QOM@yIh2mb^qO8|M#oksYM{#4XyAt zIgia_(Z5rhnh^W8!;%yU971m!#vPazomZtmC0~BJi^N}Qb_Z+i34z1-cZb4RlpB2g z4^F}Xf&}dfHY6tbEZY&Q0@og~T*Zf@#`P#YOtAz9gQ=DAl8)asM@}32bYC#bX-lZa zM`Lw&m8FX~u?F(C>d?;uApCob+bk2Y9=iLfw*%3xE}pG7$NAiKb~Nbd3HuZrFy zPU;=75iZALrK~`R{Gd}Ouipc3<=)^98{{|LFPhF5DgBwJqGFj$jr?*X&~>@utb|~E 
zWr(er7-CT!)Tb2+^QtUyE8mBuAFxOo8xI^@b18!e#lLu(KNRoM?G3REUC2c-uuWE- zoZPeUE%5N=r94l?{qvsLdu*yd&{nn{I{|J6?4#s%$TzAxYe@yrxngU2=sB&MiJW*o zc)V*)w4W(g9kCKU3!VLO78udefC?>efRRjk+B;K!H3+%cAW@?WcRg^s{yy=UVk6y- z>cv1`Qr9@syj=lU?!5oIJS*fc8#AX+aERLH1-Hz*d5VxXUAuJIq49s)h2RYZ8yfx^ zPLivLZlKi{9g_sc5%MfC4w)e|Wnut++82^*NKn5yc0FP{H+jrQUpEszGSc4zkjJWSJmKfEKvk4)0GNBxx1CG&K7Y zz5nqe+}hw9pQJqXo|cHQN`i>P4hxnm8a?z~7HcBs+l1__;>3kfxir@`F~3S6H>pw0 zPbKtfCX&nt$LJtQJ4^YY(RY(kLmyU}OLbfzPVkoWCPz;D7^>cM7-H-)!Hir@27^K9 z)X*&joajHV0_PE>eVD=2 zSgQ&E2`MGn%YskJt^zZ9K=Cbwv3xMpekfOJ*~QAyOy|=dk=}4+Ov3YzUtn$+`Cp3z zefT8EEk35dB>X&$gl%;>uJ5YKkc04V_mlbKw>Aphp zos5YoNt^yeV(;4@vmu6I6Z`8*K{fH+L;WMF5o9SA-!#T|+4`RwC#$E|&Z1~>Vfvln zTY}3eDN3r-60clw>K(5HmHQ(yCsk{1ehF74n*4gvGUE;VXvy9h@yfbudGVPLtjv5h zy7K;K$g$^3IpTez##@=yDhEt{leBDhR#jz?478AoM#CBZn%-a9hoa0A8A}-ZEb~Hy z9um!Sd7VZ|!5XoY?R{PLe9r}we_>^i{Nig;!d-+{x+8nTeBju>D20YNf=e z1Z*d>g6cM_y}n}q+e(Z(E_-qDH|co-4)f52ef>(OwgExG8e8p-)MmQNF*P4OA& z8VgDwOI;St-W0}H9C4d#EYNdwL63cXONfB9KkB2Lcjny2hEA+^nb?Tx7?xcNdIa^r zHp}jdLn#X`<#XJdR1EUZM;{n@OsiGagJx3b0_*1)=JVGjmavkNYXv%b)uqe{Q>;^@3&j@tj`4HTOq>4x zYk2_k>YFN8+*w#$ie0~*(Z8ZK7lADeyLP#mDW_Wb0*lm(eM#aGiAcnQluMa}~zs!1&w) z3Ngt)OGai6zis90hCte9QM9G^y$NO&yI(zl#NpAKPPVX1=T}Wx=wBF&TOI}n>7wZ} zAq$nXefqTyfz;DH^|-ITo!KNBZQC(LMc;*a_j_qi>q>ru(Lsv*Uu~qZFD6GZ*^eW3 zOGj_(e&2Ryz7`&d32LL_zSEJ5f+;`C}|J2Qn4 ze|T!-g~jtjC+r%!!k#Hc$=$&O#TV?OKS?;oUHTQRS<0KkbsvI$9CQpI1eV@nX$NzYu5fHgK{N!Jmfl~efy3`1D z$6bV-c_sr5K)#{=#LluI$@<4N?INDw^c=!AqII?1i~(7(x*6V0Ok)R0*&Eixbwn}0 z#3zsJo)(MnAc8gxq*gz1@x5|*<)#k5bUp zP`R_$V$Zl&pi^&*Hk;yW3|c*?IO|UHk8Ex^(u2f38u~M1$DitIC%g&-sZp5?nqsoP zV7zNRfRqrY#BHT6kj4AOy)nM zczpg$Ugq4q`%Yv}mLg(jQ@O1!eU;YiV_NnB` zu+8)%=|TGFl(7-BqhDbf6>Z>R@U}9Q`vPkXeQ8x6trIu=w4ad@^=)7dYsy;PpP;pw zcBkG_QD&-SJt_Wm!WwxEd`WeBgZv+$-<}oY4%qi4UFmPN#-!@b^K}OP0*`sR+4xvV zPezj6xqFg4dj{Z;4|a}dO_}%|&(F87p%@QVWQp9W@*ImqLO&2yQPSE-7I-8CEN^4S z>QV(N<-mM{!6Hv{94na9nlL1DZXlAgh}g>}a^lmU0jD|F(-PP1f2EATN=KUCMEV3XK0%k+#WT z@h}YwqlP)2={x$ssjc 
zl1?x*>D_=>y(-OhC!@C?2~nN0k+#B3_1x{eQ&Z1ltcr|Nxvl>JgcJMJ#F@Obzheu< z&i?_`DxnOJ>fQQ&6bC?PLK#F5r~ULey`A_MSr{K?MXFYTEdVw~-i;+G z)-)?AE15A_#G!t9Ok#_;l-xql`shDENX4akJM*d5tc6~Se~bj5Y6`dg&e{E+qF;b# z%9_9^WM@bl<+=ryiG8|OSpPuyhI&NPG<$QjO>4x7;2;YxKe+IH4Sjm622k@MSA+*+ zAW#Tdup8-w@94us9jPOx>hTZlXcg^2?QXwqU}doKjH7f+c{;;Ot}4OZu0N=}V%O{>^|uZ2AQ&N*DeQ@WRo!FuE9|JpDoIxvznD zo;N8qJtw=N@j=_reL0Q6i1-?ShLp&MJ_%0c8*H;|K0igvBv6GPNs^sbOAN zj%ILr0mf61Vahy6N-jY{cma46vC00u!MNaeXHL^lg4o&daBu)$T_tF?hJA}}UJEK= ziZzH*Lag=IqI9(Aj2s}hAN>-l6XC2oob#oBE*S>e)wLcP9y!}IK5xVcu5PS4DVr=! zTE8b2hTv|x;7jbUR$r{e%F<2U%LnkJh81YA=!7!>8Ku~y3Us953zv6=b^Ik1lRE}9 zw!d9iOjh1Z4v`E&Ip60?Wg2L;*v0=4wuUF6gb zV|pxy+xfnzsM89o zJUXgqv&!w8hxv&+DZhBlyY+}it+L`J12vwrfZ07+G~DHs7t9C`(A+ZL=ceCEeO+BX zos@=lpb+HKRB)yFa{xLUh>5^1+mwrdd3LZ)fg{?(E%f|-`On@F5AI3ED?)n7LpNM7 zSHLu=Y2gW=7^S`ktFC#!l#A|*gyUtIWaRv>s+S0h`xo&8kCcdGNL=B)MGHpV&$M;rxF8JHLzp18-Rdebrk=3njCCTaoe)rC^ny-p&U5rezG9~ zS$6Gjc#7PqV7X0vv7NO(5gm-?h6gZ#&o0uX)5aR!X8O$Cs!pB!pGMlA`HHxwM3Ot| zEF+wZ3CP=i>?i#FPK_6AbJra}+|CLh=`G^YMGQ)so0645a(6Y5tue z+ygyXz+}lRCCyekfRxof$lqkJcea1j{Fb1Jw|pKRzBq)DurPh%<3f0V#`3Go{q5XYHUF_h zFXxexb~~@_s>gJ?jvrNfgz7(lD1paiseub;P(t+Jo=Us1m+7gV#A%LknR~y9TWMJA zP6kUux`L`L3xr!&ZN@HiG@2j0g{w%`b=hwPG@e=b8E1oK_^nEShiwH)&U&y;vZMEb7=|;V2E}gL_6Az36yq2U zscb>yMV{rn7-vpH+y9PJp;1zUmt&=gc&HuDC@FlY8*b`#I}3gLHeIuKqdsL5Jk3HB z#=#zo{ANF87neGkf!F$-*m%0x22Rq7uG%N#5mOmkO26tjuH+w}rw&2b5FNO4+9$?`6=i!D^_4OS zqv6?Pj;`UjVkM_S+Bxxn9DO&E70hp)`hBcw6Qh1N~It95sv6&inAU5W$g~ zBODJihN1#3c#P6y%7&_~RZ6U~nAK!PE%C{`He&kCB+sRphkk&jhO7pk@|{^?$OE z>)kNSwY*!g<^`ZQ%68gAvdJT$Je?`qw&svA(IiTiqok8{1)%{cy9&QzL`1mAWqicF zjE$YNK~fdWLi-gffNzk%+YFju>eI)FV-J%p!Ya)0oVtm!5gd8(<;!%h78eD^iL*Ojp=265%-)>bPnLa6IT?#9iDkuRo z{sloiEmDzFXYy*9VrSvau!{$+_xESG<5v5Iwbagbsx%7UtUf*S$4KP1J^!G-)SA)7 znq{-V0{B$3^i}nwm;6WNj?1vMK}U6S(^;~OsRlO}?h@;BsbFJc=a`^kbq6;9D~ob5 zS+Z$PT80enfsz^G-I^ms!v_c0r?wOsCGyYd@6m6D7E}Nv^U`ybRe7Mqv#Hr~cx^po zp@lepTcsnT?*$!wz&++BLWQUK|lT 
zf0<$L)N3g*(I(Z93LOnbp$C)(eI0e$7;YA(*EwPBh&X`H$cmP8J^MiDzC{*4)g5l_N!fv#P@FQG-dC3>kTc3ecaMLeiMJDB@hU7*Lt1rR(L_WRf(|Oo# z{{fDbRqX_!Ubd;k8CD z%W-%MPwGjzZ8P|=-~E0c$mUeB4V5(yL{blE1txcO=K^W?B2VhMk2x)E^+s3v7H!rh zMSa*S{S69~leP`SOW&Q@vd$q_4>(=yaE90!CH9S{fgkJx8UIXb6a=UB4oMZW8IBzR zp!*a0fNZL5y?Tgzm!LX}N0Xnwo>+vH=b6*k)o}BH@1fa5VS8!wILn3NhR;2j6dcn0 z4}kd<#dqh0!cm$Hf-}$DYgtkD>E8iIQfUIl3SLDevshBl$G*fY#3dCNr>-)ekXuh? ztPYby=o81u)e=W_FrdC2!mqyu5-*~1d)4ZY8iOrHBi^Tcr|Q_QZxf#vbgeXI+ID<> z+W!1d=iT#b2+i?D^(z%rj@63}TGS*wo@}qS^*ICWo$pp#oi(fLT5GDyoj-Twq%`M7 z5YazCH}D_8{`A|eMnU-f^gZS@eClEzTfeylLSXmN9%{H4@|*tJvVE=W!&Ypwf!0uM ze72?&3!ESaBdD8MniDD1oh^ehyAuIxM)uga==ReD`~yTu4-ZF6|8h!4ch2~%Kjf07 zEZ1)F;c+gg*6s)o;jMx56o#AnhQ>b?u5;GrS@=OoWR;nxdd5Uk33#m)h5mG6@nArM%O70rjkMP$O@3bJvsZ8 zKf>qe5Y89rcF#PC72Jt(vMM<+NRPWnm2zeyjn+EFTWp;O@>q&54_YskqrC@~fsK%I zM{v>P-zspR;l*LmtR{r!X6r4)gEFg?-NTB79-BMr$%ZX~BXV_oP@o(DmAZc2LvLp> zar{W_FlTU0W0vr_t-$+54D}0rT_oZ#IOuvc_^1@)-^V9`~yRkmJFZTF-;JUkBM9)9|yk z-f5O5Ff*8|q$?TDJj>52gPP8_1un0ldp-GFAcu$J{ z@Q=zrmHjC2}R7K8dbf8hl& z+70o$`rHgV4EYq_H(_nKLwS=9raN+WOD-t=N0~Wko?MXM6%Rvzv3E^_7?aknGKGj` zQS5hh8W^e>yS045%w#{BWyl$Qd1MX+=@CuP%Y27R#BB|cqS=?F-{ZFcRo>~uS*NWH zH1aolQu7Zv|4Nq4b5^a)7!akPVf^X>7INR&9)jhr%8VS?V7W>?m&8Z7Tg? 
zPmwiF9riTW!b$(e_?^&hU*|-S4x80~z9P>g+o%z7+UtSp?Z34MuSnq&)1I*m5t2IA z)YRNu_(X!5uQ0i%M|eSFMg9Q@4=GhN5{R>Mje5XcWZu*5?RaC{;~jYB8o8i54J50d zlp}YcDbGmCoAvj35(@srez%Np%Fr*el8)zx6eI-|Cl!_b*hyrJ-=h|USQ_uop;$pQ zJ82ahtM=F9xQ+x*pLWEYq)+bxd%77p1?*fuU1+NbueUTPi2c_^c@@dMGO8Y4MI;g# z-NB*)^d~QwsXD6qr2QhrF6?RL@b}LIWF1bm5n*-+wL6*5?JQ|c@GvOPe6h+@u2}`> z{w0cs6C^?1%pLMk?7cY*fg&p%2?ZJ+zfjr~CS_H^f1OXh-e{}+0~kgvA77SdLyPl% zPuVjtpmy)H?xkvH>e4(5BkIfGey@na8NX^FePQpDWFL#!RTh&#K1f#aHNFkEJ zA*=?p^mS=3xu}W`Nwv=u<=54y4%#@`+XLY~{&--QjW;^Yt2H8=jJ%S_IZ&-YQ&$TA3*%a=)Q_R`TRAnwyABnt&RJ zNkHlb9muipl7KS(upTPokf~8TJFey}Wvci}PA{SDnHwmS%QJ`u6{;vn6qgJs6DC{Y zOHuB=eq$O$SyfW$-xo+X^GFKXz87KqwvnUKYzRBMonn3wLEF~gJR&1CUv=+}Fo^M# zqudK3yBwAZB3?KoxH6M)W}!r5L`%+6K_uN65F4ulQ}3Ju$8O;!BwGBLV(t@x5MtsuG0Ig3?q zCvp1h^Ex8nmxk;{p0#-F{t$Y8sda=$?0f}F?Yq_6Fl)&0CtpqoUusqxiiJQK!)>UImg_P0`QjcK zL#D2k|77%75@-YJjo7$8_6nhODC8xMX2!!=Br5IT(C*Xl)Esgi&r=exY-7&**>anP zY_BkB=wrI9 z2a5Ji&^}uTmFmA9L<@>LR?4Hg-v1<#@_y4X3EB}*HaUI8jJS$}N6hC%I5|qWkS;Hn ztDckVMXUT>=hKm-89|D~Ljp0ura@8OUz8%MN{3i=VbCA35!?JKdW_)#G3zlEVn!$f z8{eW)qS^ZX#-C^FfHQ$1Zz%Y(oO86^)&*bXCyM*Z@FC-(EV-=#rRmJt79qzqb>^M? 
z64Qfkx}@JDh@$gzP`HppPWQUQ@m0RnOZUGoz+zb-K91B8B=H00#Tdw70^XmRh_ zd;n;6`Qx8=#s#u9O|`LX5YsI=+(4jOYXr4!Qk;N+P?WllV~3Rw)(cN_vPq|D^$2?> z9Txb$+jm+imyfrb2kINk~x$1@3xr??F=n$EvMMC|sZuSHbwwV7qlIeeJ>>-5|{ zjght2zb*_BgU=)ClN&0(N8A~kG1 z{je{Pxv@4()Mu~{IZZ{kyI>FlHb&hcvJchf+eB}3uE13eEJ1y9levkfTV96i`df)= zHxMqPu7Gb7t1*R z{VgXYpG6ys*OY|^kI+5_=MRClq_;RQvcF~;`~nf^p|la_?;b}H1HtUEB6lkSY?=Go zPi69-Hi`5{O?ndg^pCoS#zE*E1oqP?*iv5(B~JodQ;K}yzBc2E!}+`N!B(ldU_)yJ ze!MYhs{|eIg{(w#g~k65qvx;gRwm1A)6r{#>St{@&>dt<;yZFV?V_t3UG&DDIAeRmr!>zZhjbT zj?k?HW}Vn8xyMQ1{sVAO4Kr>V9H+c=y0XXBlm}$zHH&1X=qxWs(~kkR7^kYb(NEUc zSqXJp{HWW9@-4%Z|JbOk?6eXG2cd(F?%sk97%@P~J5=vl)PsDaz~2>25@k#vIDVUk zuRr)!kyx6-4N9)aZ`JQO1INutQ6%e(M8YY7w(VuiE5Jr(=|TEwcYqWZCk8`Zdd;*# zj=;WTP~2O zI=j1%X6I)n-ORao`27`tgt>(4jA-JBn00tzvajQ?m20Bu;Ap*b&H_HNf29E{gLDYQ9WtJopDyv9b61;uH<_LaAu~DwT*( zV9kk^D)T5I{*C1MM0)ckag-dz%LMjWPj0?Otrua3Zau7|O3X)HEZg+5rtSji8(Aht z-U9;P_#fGUH0IG&W(QZ4qoQS^Rg&W!{5s+_XPM9NB=!v-F>6e+7y-+<)8wCH8#cyT7X0(S=1MhKC{#S*X}Maq@j zCjO-6s_fm-XghVJo4-`!;`_MmGV7PpcJqfCHpw5F==}V2Prp>=*}ifY$m(r3nkpZi z%5bM|JU@&5j>8&!QMR)!puNLYkTr6(^X4O|YirXz(8a(>@aka7iR&eOQ(c?|Pp`(k zMTBF;i0G~kguqw5?}aN4lTT%;4q|4N0&#|?Pt0`-w{9d9v@*qBCCFeeQ(iXzR z??xqo8B#HWn=hS;^j=FjZ+=@sxxq)d%_hADMeyQb8+B@)lRHOf$z7HKfjXeW8$AsZ zt&Wh%qukG1E2enX$%b%*APzip=8B?_O#F;=fbEshzw;cS`SgcI}ZyTS$gPt!)R(ivRM_^L5Bc> zM8|MKb=wdprHux2b5%dhUlC4}$Y5Z5WP0^`>J)HFWtmRIYNqprh;}}^=X}-qjpuj7 z4KEAnnyGZQ&CK1OgH{vF%uT92?$Ei{A{x5-bEk#PkNs1}?E2ZbYO1TI)*LH|7Kge( z#`^f!d-=lUA-PDNZ2uy=4Bbv`ZtO9Y6l~1Ev|aI?ww9+fxh(6P^H0b0ijl!H>Q@jo zJVI#aT&skEz_3omY=Z5fuD^zklszfu6zp`Ah}l}=Ff=5PoS6n)bTIItRKrt#K=yIhZAUW^2C}se_XMF|5XfQ2sSLQiMqgbozjDnH^lzr5 z>fTFp7QH2$ZI|j^B|;3e*dZHIUv*>wo# zZLG>HRE3*i=6aiigpe$P!F&#xujN?|@)-jm3A+7j4Nz#t8rD4@%HX#-Y_A~)i*IXA zn#3!A|Ab9_n(1f2@2oVNZs}i23S#Jz^Aka}D#sajI$(KZVeOqVYIvi)FOY5iVxe3l z%SCzmcwI;Zy^_UR6%o_3`3ckM(1H(?#msdIGdbg`KwHZOA zaHyAV@`FpBvE6Ptg#Qg467`YBr&pbTk3@ygQFgpok!9uWA}b*{?5V!zTR}0!1AbLF zWZU7rm~gEjFb=ThxSaz}t^GXOE^yM;>SJI%`IqK)4Gm 
z@+uSo9f+p}E7s4xt~bevg1xCnO4FQVbgZ7qHBqrDiN2uY6*o;VXDJchc?zVb+BEGN zYuaK1l4Ds$(%*R0WiM1h(h$<6QOXGJ|5+7<9l(h_y$8vgSd~pcud*u$*U-h*D7&v#K*8`%vRXPSYlD!=?JRJ zaoNj^ynTlaK8gcb!G@Y->3STMsLZ&snd*x`wJY&5rkC_xhUtu5S!#j~Yt~a^FIHsF-L6WIWAAtqG zFAW?_IbMDJuBmb=6NC>36Rj_LvYP>WPbkegk#CP0NzWR!tvs{cgS=iOW4eo1o)1tq z82Rz@f`~T{sxU; zocRi>Ct6Dn#&4(_Z(Ob+Y-K^3N5Bts8kmUUpNvfzsZ1Ho#SlNwIXQpq*K{l#S zw0&IEyVEXbWRRn|`RdTC12%G=#?Sb6DPPdBeGh=p--yedm^^hPO@JBCWz6_K;&r5G zjrn5hM%k`)k(6N23De$IN^Y*sI$B`Mp_ddlx*JA`0Uaxahyb)RDV z>^I9eR=&21KkLggRl;wy4@br96`r?B%M+{1!?~bB2B9NLj(!o=*9yz0Elv z2KA-@xavf0OsjOJ|3H3Iu1+iBGM*L%?Db2H0xP*Wl&ZX-Oy{X+*`UgMroNR;;H0xC z!##Onfwwme{t()mEhy=4p2cKpvf}%$dX2uHPJFM(1#x|BoJ0BrEGJsKzCzijM!(W{ zMShy<#O!4K`O73%(qRf?f(TAm5V1OvyXT1q-?MmJ&v#Vc&|p7*+Km}bx*>Wv1{klG zU|s!9zqs8=>t!U1b}!I~5L|RLfMw^Dt9H=0kziQX2=GE8>BMn>D#z*Y4#vQ-xekfy zCsK9T^+z?YET_O}r?R|EV^wzr{u5$XKZCl**>sM{Hgb5-ac+W0QS4FjC$rHPH57L@ zGA1?S72bh-C=nm}?WYH zQCaFytjpv=%cP9wT;DDFEl> zgSDnEbK>S?xez;9zE;3zH4eg(cx?`eHhSDU??wmiFJx*h%$6Ok`X4zFr+q7I<45B` z8*W9b``0{w4DWwhjIbB86n|dEk~y?KdY70o@C~*f`0Sw+EK{)WUQQvFP19kB2@LPW zO2T}iUW^W2RB6oDnUR@(qx#$syY_DLZx-1zG)AEua~a@R`Ee0a>sULWbg#w4Yzd{+ zn_x5;IPsg}VC%$28eH^gvx9ra^G?QDqA$kE4#=9RdSa{_Ji!opR=9HFU~L6fMzO4S zM$10S>4vp`k9!ps#cRbVOlq}lbIY)DxSRD|y!`dzS=pc*$O{J{k`%XPAD;?;s_U?V z%{yHP!GsSv)*aa|7_1ygtDi$M8e$0p93%S~X#y9`*${gF0KLZ+S6>>U`FK_rR*Y&z zr+QDV3Ig7)5!ca{#2q?B6IL;y#F`$@S!PPug*b5+_~KVN*n@EW?sKbDq2EM09Q3aZuRXi#z= z^qr1*j(F)aX3n8g?K|UI)35C!fN$*y0m1bhC>ce+2S+uw6ftcY9>QRASDt_mM=DrNER<(sFEgM#u+)jQQ%h*~|Z?SKp;5>!=_ z@lgy!g8cAU9@IAO`3H7bSm$X{YY4^||%o-Ag2K&{bXSl$q zWjx05_ro8dw~lyJL$`XZ;Bv)SZReOqCMy|7Ee^Da>15bTZlc_8+`QV5xCYU636>XI z4g|aD5fWk^)7f1164L44!vZ#E{{gIBLS2vX4d}zrU_+`va2~ zxdK{`Cltu76!9aZzhYzK{31VVyeAXuuG!CiHuLz>92Wdg;J;<`mooR)7TIN$Cu$e@ zS(XSaHY@GJQk0$WvpmmL&eydU@3MO*=D_dcRG8M&KOF11K@>YkpMFpW{l0NUUb~Lh4;m zCXMMh5p@pcZ0(+q$xLkiZ0{}PcPWE>kWg<*C$XoD%EvipXVN))Ouyu`H?OefJzryB z1h2?iEIOSo$QnlSIJj~ zRqwrvtjN#OG)1dCvJ0EW$w0PFY{(}ktu_&<&}pA+;@+C=PUJTh3O{z&A~ijL6-Ch>?@vtEWL7ZAn&sDZAqej=P} 
ztKiO@W%<3)5?B6PPw(-AtX~QJ>ilwCoid2S4f<}TrRE3gxey&59&E)R8_Rao+Dez@ zv%c!1ZZ>fR>nT}=&T2F!WqLltJLx1fzG3`KJB;A$#fnohheq0py85P{%PK-&lR9QO z7RF}fygo(@$&@knG=w|r9N=x;DP}bc@P369$rX6>qa2Am`T`lI&5slV%366=`o5W2 zD)&71m_48Lvz}=;i<_Q<^01}(`w%ve2;kY8nl6+57#dW_ZpI*&i}W0P*~DTvGbbay zlsxll2S@HJAmUkDh; zls&ATkXLQWtxOv&nu+c<*I1xAcJwztQD0G(57I97-Ck$b1Idrfy$o%f1!d9t9Q^qmPpn5jG?c8V|-lYnQ<2z zPFXs*PlGVQjHmzvH+%_Y(Vb264Y==w50qSko1X_4#6iKHPf%nHgy1)f};>=)tH zUMB)NxsvX=JQ7ttaW8c+f+?J&^y+8Ur*w3;DH9hzTL@WIZb7N^Fwf)Pm2L779Rv}+ z7CyE*qIeGFnJJ`dt0wAA_8O+xN*n=Do$6BOA2}U)N@Mh;_}pgyxh<$~ZlI_YjBGX= z#=hquTDaY$KdeTq^=im%-1ImbVz)JT;#uZ9;cUo=V6Tl8d0uDB=4RE;Z{FZf7IUax;^N$h9BCn{ zEM}hg_kL?;4Y3PUd98RD>*Y$SEc36St{4-crdg244Gl7lY3msteQv20qx97mXz2?s z)N1&buKADR7gYf$gI{WLJ!|!tCQu{W8t)%DT<}fHV&t_#9tNa{?HgopF3duA>~$d3 zq3kO}F3N9*>ez_qTJtNb%o!5gtai~;%g9t!vMuD@Bk7f_f!!y^omJha^3hxk`C{t)|it#2l( zLkPo49P-1C10L*K867yqM1b;p-v(>Cu9u~2ce2A1MI%gL0J~3&ka7LuN#oP4SnxD) z>akl}4Mj!GuAMQuf!{8UlJOD~>Ki!rHRxKllyrMYwaIQHTe*agv)g2{#s*qK54L`p z6~_24!d6kW&Ha{=*6nwx{i?=Cawce7<@uY_j{I{|1ju`ygW(Hnn;k-JKR~*-X)Uzq zl>;f-Uo21Q$@+?~t7&#RR{Y)@0PFFy6rdl{v6Q}{C1w|*h< z1e;vVJo6T}rxgE2|(zkSryKBoOo?v5q;m{@kJ@KB^cfV6kN8J1{vvxNdO+AKi zJ?V6G^;jMGTUN6(EJU=o;cK3*2nnFH82q1uXC(^$_ zegyb$Q`B`Q@aDg&UrnOgT)p+~v8mko@2FlRYn{Z75D?@6)UUOCS^F;7!F}T8t zpHZ^ZSuF1)n))jRu+yX@?}`hEUE5}O`5jab_kF9}G}|e(-wn;G+TDl?tfqf8auD*R zxbKSbj~GLzKZ`WIYV%n(NnzqUTYoC$F_S5iVJdp4S1K|6;!jFXKzf~h_rpuO9}8LC zMF@L%1+>@JF^`$lMv8Rn0y0CbP71X|3>{jU5z{{RvG z7wVn>@QNk&sDdllfsikdV_-)=-Z|h7YnJ$B@nc;0q42)W_fNgDx71P1jo7&}V@`DW zS=a-D*cr+1#w)7uCbOk@>q)=yzlYUj)UNechIwR+`EgB<+i~4geznDT13>YQfxl>P z6Zj>hv};{**>wiAw~TDMbMo$tpSnRjj`c0tM760)#y<%BL*tEV2D$NcGF-;S&f@kD zzk)f&I19I*Ue(F`QT?3k{4;R6){A6cNYk%HwZ+Zg-RGl#44H0s?!Y~OHT6D&;L95= z76fZH-4XN5*vXRTp!^5xM~eIs*V-hX+jeO``ZTKc>Ypp+PZ;&d$UUmkRw2Q?&yqYT z@Q=dw9~pHI3f*bYSx0YgZFi=mpgeMJmNH`AI)ZpUmFxZ>(8t0*8`}I~*RC!f!q)ef z7y9Op4rDrf_-IyGvw~BgWx+VXt{t?`5L*09y^Z#^-GL8?bmJ zSFHGJT++OIZ(-mKW@oxb^DXu1FBB`0oQ>f9)+8K{TvS9{tsN)qouf>@3zNqhl#17$ 
zF4V4di;077B^~y}g!jS$^si6~f$}&aygT-E*Vo}*t*!pd+SaY-9}nKO*M?n*%(`sH zb~)>@i5RbChIX9;4w&jkTD0tR#ING9X==8fA~G;KeZsV%Pu`7=)(6{=e+tu+QME(1 z*;p@TBdGPNm-did>66>skQPW;58^8`!&friczV`ZnY_@3Ifi;NoEjdb8>^_f8Am;8 z39e>##AoYKKvg5$#PjP&B92ETcOH9EDZ7eZZtw5ZP$wY##2>9C!Hy-`NzOV|T&p(h zahd@|vq5Dpc8urHih_4&VQabE2;)-BPZ$EBxYVtz;@s&hjK>O*jFNj*bcS2CSv5H_ z1MWx^{Hy3ssifo_rCkc;^vy;~tN04TsZ7#ZkT{9(2G;c421mVCI-yUAGh0QdMlIIG zE+vJB9)BtZdk`{yq*dK+=S|e@<oU`a-|#9lY%?(Ub*oT;tjXMpAU^nRcqVX|8l%o>d+_?SRW7pK4 zz*a|tJ{UpZKMJkii1iq*wOvn9H@fZ1NB}o3gcU>m)m2p=?ogyemoh##{j9zh{6^E~ zw)nyE68Z_R_5Dib-u4?5{mr99aFMUD85!;4m=C26ES34E{XX{YkLi^cV923@!jLgdnNs0bzK9ZG4@{kKutdP?IF zk75*>_iuvwex2|y_Eyp^bw%3_jp37fczR)(45#&AE8s62c*9rm&x!B+W3Ak@*Lsc1 zO*E2qhkDJn z&Kox)Q^49`)cj9oUclTz1;ldQMmWr1lY7gX>~9Ps0Mc^_Q!vE^j{e37BP5Z zNPBMy$6~T92xU2W3qlGN7(XuT=LgccD@yLj>!nYYB_9Lmg8Sk8`lhw7>Z=TzfrCu6 zvjg{p?OAi(P6GZbS>81Gb^VI>mpbmV9klvpp$oN`lr!vclB zjT`t`hi>?+Ic@7Jw**NHUy zF9T{8m;N4EZEWGx62kJq`Hd%yfp>N6tN`i8MMXH?qbk}I^{)fZ;md2iC8D`$wJ3hu z7uzR0k(sce$@xc_kH8A@e~yHi3Z~#RRJm*{Ro}r^@b}?$U&lH!|pKO(+Tx~|^an}{fT-w}Te&*e`q-uBM za0YsV>0XKA@NTZIBA(*b3ELP1{{RppCmdv-N`t`=Xj)f?uXPPuUbY@x&)KaluV%wz1*DNjd=i}W z6%ur zCF5#PMm4*Zk7BbS$c8XKfO_}MdA*jKd8*!~pQl+#JhHG zGyWg^34a4eHEq>FbTZsl$ zu2q%tPu&WBZ&SM!nXf;CZ*=(dy&?ukA+S}4&47_4(knuxK>NUNC+mYmRub#J3|< z_VUkpGuqi$M=}-;>dPFL3`gBLT!GsHh~DRSE#qUO*1Roq@b}@$c!xz)xx0qlW4iO@ zC|M5GQ=a%@2_5;aQ^Fq&be|S#Q25(abh@&Rb%xb%o6d@M4h*tJr=SIPt9RVT(!F2A zJ{|u6gwsu*Pu3aj?1t#!fPbuTIP+Na1oj=O?wR4O13>Wn9vspwmfFtR*s%d&!zH)Tt^6mT z%s$pFW|^RL-Q>v{0B2%(-kES0rb7GI(KeP-XsPz{?R9xHtjPBRh=wq#2VsuBy(^aR ze};6e55-;}xx0t$C>Lsi-#?iz%<8@H2>{cKjOtUnJa@%f7OfT5o1^F&Z`k6NSZ}98 zA2ASuHYD{eouvAZ)$M*8@YaWGrbFS~1~~=2wbU;FbKlGPMH_5zw@Y_Bw+wCtQ^nBceMIIV>{6>a0mP@n--blp9Xq`15pm9kxz zpgn-A@xio$PdUjo1ti!>0!bdiq-0~QS8XCR-cW<)3_fc0SG{v@3o@|ZkC6}Y8V6>g zbDkj4Et#w|Ne9hwIS$S^X!Dbk*m3ygxw&I^)UL0zhAPs#qzc32jZDBP1JD85yF0Bq z_E`nx^vfgM%8EW;mINI@s%J@#^5*_aJA)jy&ahgr91>e_&#yEGDSka7{3IseEX89U zU}4d_?_$Kgeoa-W4!f_cfQE!3)(2cFd&BnbV_N@Q;$zsgN}=~(dFx$3{NToG!2 
zv~Pm6`Q!5vO@08$oNgKRuhv~}!I0nR_PTs_-@doFGD_;dFj&C>Guw7~ugMSDCt8EU zf3%l^b^Do;IPJVc0hU=5l|?56pQV1oc&Ehr2ZLjS#9HR59i&=}fnbT7bhiq`?b{mV z+dW1Fa@EqI>R{rzPCvz(74D6#39W5B*ksdfn61APAy*&}IvVb@*(|TDQsM=aI8Y>6 zQ)?@C2Dy!L_gC=^<%fuLZv@=Qre4`xY0G!0-7T)^l$SyyKRElOXBDI2e-mrBTGg(L zt9WV!ON%igY8KbKmQpdfc90I`<2m})9Q>^9jCD3|H0b5hTFxMF@@{M{PCM~lTk)U6 zqgd18@wv5tSWT!|29eZnwN;yD{{W*5R+Ra0Fl^)0k?1Ro_`%_) z)jk$#Ivj^|Hr9D81~4RrI`>6Z52X16M;A9xCw=hxn;>r*Yxl{K;vBTAbJdV^No z!6(XV3U>_Fo5Ex`IUIDw zSGd#3AVv?pNd2KpYA7ojg$Jku6;?}heNW1D%yQbD>PJ!=gZWkVvAG0=Q^%mF#+7v1 zq1_SMiG`D~?}};I9HZFa^gTY)UHzan$vjsB%Xg#MTEB(_i-@DNc*mJ4ddcPzta}U! z>U>k7THUk}+@jBOB&=bNX90GR)6i!m`&Ca6*-vTlYr+?ig0FWDqhmPR{;@o-HGd|I zes!VZuM%lq90_}J!S5tf4Wt14@*YmmI}W6tzKdARtW<0zI(O9@SmgM`A{To{#@p!Xg6=Dbt>3U}cPTW^Wp9`Sr}M;X;L zc8*0shG&h3%-*L5tyE-j;v=Qcn(g)!ig_|wzFps1npii2XE_9tt?5ueW!thW5WjeP zdew!LH}1>i4u>2X(Bv~Gx5S5NJRD}5A>D33>JK%kbs88;s9b@^6_qj-EHU@HQ6;FP zxsec3zb|^Jb1Qw^^*mLn7x!gA=M@53%GqXo~tTS|u(?Kc#lx0sJ7|JovTY z?*~g5WWUxRibQUOH-g`wHOfI}AY<~H_P^LW!*8s9(>@(78O5W`a@=$)8OQ5Q&qI1K zZQS~E_WuC*iEZGIg7Ew~(C!3s>yst3N7ELu0k{uifGgpxP=$+sQcr63pWC~~l6c?a zz0H(Um$lGsR>Bol%I^$XM*8kzKj1at_p-CH!`HoMbuCVc*J{YD;Go{4uRUu+!9E+G z#r`6-@bm~-?xqTMkGj1}6W9UN_N@1euO6Hl_J7#>!I570o^Kl6+nFsixZFyta3m%& zO6IxV$37kxncaBW)BX}me-HRh(!rp!)9s2tk#v!_sWg&_ofK-!smLuL9GocW{?&1MHI2ou zfc1OnZduYh&5oQDFC*bf_s0h{>OToQJ>h>Gc*9KanSk2c==N;{Guy;G$)=YKz_IJJ z0zQWo5M0u*;~TS|_^0AO1wrvE!q4Gb*15LQk{6m3LX8+!2X1?UGDUWu5p|tI#_ZlA z_+mYQwHhb*k*irH;u zRYg)uGoI%kDEqv6^u;;7PI;HLx!Y3Iwb}F?J_%6U+Z(H+@|$ijRSB!~_ooOw*~MB5pmX+fp;(bd>OJ)Ak`yQ92nKzqd19Fom{Iq7q)^Ao7^bf)fCs)<< zo1Gd}SQg&xR^}^+`3lUYIFNNaLy?nQx4~Zr*z1z&+KWiiN1?lo49o(&vOBDZN}hnP zAoj0((zHzuJ!;bS7m-*-!g7EJ<#tmioKI46KMYcf)b5Ndhv}Xh()67|80NA607%nT zTZWor_fc>oP&xsV+O_7=W*-lrwvs)v`O!|n6mZQh?g!&h!vlD#;$2!$L|0cU3c3C` zV?Q_G%n#v;>7;1S1bm?3SFi)6Y0PYvf;1~C`L5e{5rOYfv_4}iOi#=FBc^?7MmfQz zw?mpv&4NX1%UQ178bh@se1uc?k79qqu=QDE(ELpe>smCz^4XFoVUH$BLCy*HIOnx) zUff>AwJa?XMm^+^fIr5o*=a^SNBe62Ws>a>#Th|_i4SE5x$bH?fu(ba)bty_1}N9H 
zIau6XM;ue5Hsul-GNhjEmOS(Taa~+`OqK}^yd!0@{{XCj3)uDko^2)`#k+ejK)+ys?CyR7)sYz7#QZPs5 zM=)}tFv~KBR{3O5>ODc^RH_1vqoAy>2?^r2xbXeqCS4|fvxyJ!#CeUq_~Y=cfePec zQnO~9tYZ_w6iTTe3}ed|t;3bhJ_j38V9Fa-kYM>n0vl%V?L(nj;J<`Oa9 zr#K(O`cm&vkZMrUAysx6A*k6m{PSv4F=4lH9Tzyw zewF+$@%P0~+2_RZpW7qh6|{2dk}Pq2L(-==u|)g<9Nm0~z~Ezn*1t5q8u*fb1Ngs8 z@m;YZO+QhF;Yj1|xHbCc;r{^I@Ai54rJ%*4TlnKmy_-_fci!o@ke#iNH%3DYG08j% z;>GDJqtw7XEO)*x)wD~^KS$J({cd5o)@@qWc;wu!4tB_=xW|7(NvT}h&E*)c3{zX* ziIKqIfN(}V3X&_EztOB;!}iv?Qbz@~y`rs*a1Y(WpaKG~Yz`|?ZW?x0d^bXJGIQRt zO6Pwy*?RZH$u_s;#EcRcw<|H>w;plsD>KFSGT-S=$a4|~^I&nrpp1TFsHx>$`64g} zs9aOyK*P$GFh)Va98-G>n8wg;47zo_mZN4RRE1JTc=?e301IcRAXe>-wZ*c$Mqk|l z`=kS>u4=+VvM^l+DY8sY%8DyDG_)lVFbsqgs01O~X5Xo)z>xG`(x!^uNgss#X>+4e z6Qjd;UTw6>eRt(1u z4?eX=NbvJ$D{~F3<<{c$?gsPscXc0~N_v#_EtqcDg3<+Ftt^(&J-LQ!jDmBJDZ}kp z+R6CTa>sIdp&(WB3dqm#6KVFAHn(eUnJsT&@}w)m<$z=K+D%&2F83mch{0Y*y;1R` zklT1t{K`pow0|xo>dlM{em&`$B=;BkHMPyuEgZ%b`$TTIDEQ=3Q!_}j3cDYhtv>`D zQ&mn6J$h44<-r*~*{vf!=9XlHdeh)`iGdB%twIZt)4fYE_ zfNY+W^>R)K{Hd^QRmK4N3Vz|i&*xH~Q&nbw?js=&UMa>I58gvhidDuoaaXPW&cHi5 zgYVjJY}JXMde+cJTkdgiBY8iAbvizq#Y!LI6W#eZj&%9J9MiWv700~P)Mn4ER4q(t9KS)t`&K!+7_Mj zhAZz`^H`RX63;+-q0z@3^~_ zrkRiTnB@NeI_IxjGlHj}#cA9~UD@~Q_=+$Q!9Ezz5WOSIHj@d>?b;ZyV`861G#nQ`BO&1RM|@Snu8 z_*X>nE|20lT^~ngx+y!a3Ii2TPb3!r)=EhxRX57^GCn^1M}x(m5p3EhJe}^u?kyT`SnKqEQ9n>)u^G(-_ZZ&_08g8R%kTtEg zzxMrF>>u9UUC90Y6-UYe?hkHj=}(A0A@Jsb;cI^o>YAO(toA6w$_xg=rMEKxa4InW z02Hn|uS3gzEqIGc*L3-BJVU72M-|M$+BhyoL=dq7LUqAl%nx&tdsokpc%#F*J@mdK z)~qgK)2($Enpm1p&vu#IBgfD#anPFC_`~rp;dX-h4xiwSB5h6XrV{Gcmw>2wHj*J% zJOom5eQU=&dE+DEyWuQRv~fhNtcozLE`DYM)1V%;(HJ>1%fwVxF+63h-}t-4m;Nf$ zk~rS}<(@laIS|F4kv;G$HqS`byhn7pFNf?MreIjgIx3E&{VQ9;UKZ9oU#cdLq(aQ@ z!_9EH5hi(a>?`Tdg#Q4t# z*}d==;FMkf_(cxAqj+xaIPFpiZ*Daf0P03ew5^WOkT9V48LvRO@U4Z!x>J~fIVy0Ch)szzYQU~x6|g9+V@9CkCM_qVZKY*Md*9(uDV=m0!cp0VH^Ui0D%mvQNmnC$MY z1c=Rmp^`=DKi;i>3iu;gv(zs1_%v3CrN-w_zr;~F&u+QFrR^e(R3yYoF{YUzK7*Xm{{RZ_#enaVM43Pzd6$YxtWytVBNY*k zQ^q|hOj#bavmb*shi#0gr?_UT1Hd}yO~GSf>~lkuZMh!GXEq{1&|;pJ;Esm2&w{Vj 
zg2+Zm>C&#n@c#f=RNwXoaq6U0b4R#)Ib7vs)6C={2XFBVb*XH$Ww$`Kw(5vKg+>6b zhEEOZHnIehOq3sWH8Ie2O}P66gV*IeRYp<1=87_X3_+(jPFQ_CswphpK_PN8)Ee89 zMb(txY4Y{{?Npaa)xbOKWCPP>H&q#XIT#~JMAq9^PnD#%5ljBo4oCB=L3WH)rqcBy z+*r;30DKyG(6xQL{=p_a#c3LmW&1hyGyY;mGHJnEb~*ei!fCf_F3%|LC#EXL_8y>q z^bSAZ2BVrH=_Tljw9p z&ShUPhQSi z+}GI1?wUJhk83LW^`?tE5HdpjcUL3-01H3E#1N3q{{TV-J$@x<2!>N>H&41)RC`or zsLJP2_FHK`)nI)pNNqwe7$@+qYSZFxg~Jl_%L(p7pIW>CI;upgB$Cor$@*i{pldTknsNI$1 z(^!nIdF_gT*jxtWkUo`iMv|H1J6q-VZUCwp=fqzGcy7tAwXYReLW%}k?MTN2`U9Hy zzv9=&e~2FtwJ9$=MdF*8V|fcLy2@1$b^&vY3g9I14A-TU%;kDywrjdEli8fU)-bvD z{on0#@Ji4jxA8o)ZekA0R{4Ium44s$tN0-#W#7k8fDn~mvq%ei4p)wSYv87pq{^2+ zH+onV?WGbU5_8m5s%(At%6h&}y=s!DE`~U{lW=x2VbeD_7yC z#GPNnCgR`XAB5jRk|_5j&BNjeIpC|Dj92HBJ|WjF?vhJ8`4h`I+Px3fy+h%*?Nj2v z3fW7c>$4@zwCUyTpDo9yC#6KE4Tl=8_depg)bs#Jhs4@@Ay9IcYIy7GP8P7-q-LTKli^#%fpvGd ziYdIv)tH7y9-e}^y8OT(~g4W{ZB<|&GhsL`C0>5BOK{t9WM_-*`G z;oW+}RFCaaSv(V35&OkZa;|^bJmcwJ&82?V-VoI^#FxgN8C%wp?PXaL5!_jZ05nM5 z94X~V^~HH-{1i{&75@N0rJ^zCjCxiC!}o`1#!oy}z4g4@t~*wA)~<`V_UlS4r=uuc5(gDlZ@ZlG zI&)Vpm#71`6hF@Ix%e%!oSw00`-W>s=nb@hiqRn(J$KSIRv5 zVHCkoWT-g+=hLllzBbi7M|yMs_t$Cvz{t z4I=kkxz+T|HCWo}cM_X>Yn|*Zppb$`uRp{2R}1?^{4MZpuf-iBQ<`bzeO;W`So9G&(vou@pn(z;)UzA^DvhWseDL2cueCX_NQ?_KqR7)-EkP-?i-aF73K=<^oqQ7S^ z*w4hi0lByMk?~(ep36{cg?%4OxrpW!Qgbt8V3XJmmEAdVqt1;9RjQfn{{XQM><^`U zC$|3pgpcA>@LlQ`kVSQO34}7+hFq<&2h%xkQ^kD|eel0d(|i?wZQ-pF+5vtR1uZcJ zV<&D%JYXp#3iEp(9{8fh&FpNoR!E;4ku1QJA_UTiF zIHhCPG(UxL=-NtYnpJ}vYYRP=tY*u|0Q`Xdv_qilR+?mboKQ1K8-$6*4(@X!ANP+> zrFiK0k>i#B07kU&Mbd>(9};%-txG?R{{Rte5Ta{oCjcpu5%P~=STycy%@(#h9}sJ| z5Zm8J{haptjJNU26r^MjFnPydIj)n!dajeF>DO0k&nKOB(mb-E26o3S$lzzCd0c-O zd}$S&@xeW#J1z8BLrTEX_ zi1ik*)HKPg*-sP2E>|?mzl^>Sj52@1Ln{X8tV;}6gWcZD#FxoGTBjw+%Qdor(D+td7U;Js4JO=IF{V93w!&MW5xkgN2m#PZ6g%xuxipwzCe zd+qMOrlfBrzle0%(43d^K5xU)s#yGA(X`nWq}3uUt&QQcfnFhNDUBIIjFaA_mQfP} zw0>OFwPKS!)xJIWU2aLZ)g8ec<$hnnpuG6u;b+X2`k*-HXcghoTq}$fBj{-(npV!m zJazV_qQjXU&OSWp3on-GBO(4TQCQw8`12ShJtp2xe(P?pFOq4QHsIiQ&svS7EZm`? 
zD>7V_j>2z_ekW`yM;edsG08Om_}SwnlX6WT{qtOv!^0FUHNyzwL6>)I06w+OYQGb{ z9_bIZ_rG~MpCIxI$ZR}_`~CSnSpIv%JkZ`HOIz(AFa2O7l<5#>b1jY z*VaBDvQ0}wxEFTr*b_ntT>59VY1wG^nuXff+bT4!+g33u()MyvvqY)I^)KH1 zK=FO*=6@00k~zZ3fmU>{iT*m%V-Z^TisKpnCeQP%jbB04^gExj>biWeJ~E<3z+vhS zaZuXDl`<9_cBLh48LV1sL#>13KaV1mH2S+7cPmx3-;3TbzTkL@c$fRIFIwlMj5?MF zy;m@jNXe;Xr%9tqO~#^U=xlnGmF=8&w(_j8Oo5|xIRp`q ze=2krQ7%!gQJC4-#v(C4jr>M5h&rB?9N z*-3#X{AyD=D&*rmxva?b7u)kkoO{w-YBMFIlE#=|rw27WzDGwiQZ=k@t%cBrOQcrD zLX(2uN`9$wsM}(ZxZv@@uQ;{TMy=vYZBp*x#>}o_W5FNFy6g8xa!7S-@_lJS9L&W} zMrhkg%-}Kl*2b90B*_~{xn=|AHOgDu81eJ59_F^Ri(-?Feg@B#^-1bTw{0UfFb~h7@4ZD0Pz@hVDLr#xe;EaAXqA3wYZY9W8Ada}J5n0?qq%38QI+|{g zX)oGj426{Biq^h#x6(Yw!tW;l5m?l+G=jdSEzO(#oK6^I@Nrb7kpk=syB+gaZ?24x z{hbp79Ytrf4Z#%Fwz+R%P35hXoY&WXEOAG9B1Zk_1OhwbaK{fl;w44u5K>wEkPit1RVCN z7Ls{U7#(tRT3VaM7O8qzkYj;zJ@Hufw+#0jugbXQx+tqFv&@^cI^wJ3odO-BIH_LD@d6};NbMDS6Xbh5q-AK zEy_t49OK%v^(hHBEOI#l4ctxQUeZyp#rCl?ibZw;@%Lb&yZH5_{HIK;LjB58UUNjg5K~o zyuaUpzGNrg1ZUovqC%3Sa=_crjH_2Pn_C;f*yv%pW?YuQBcV0up9p?Gc%R{%ms)?= zEOkvQQMESumX)bQS}T~3RcwDc^H@*?$sGIBh0uynq@Q|8Gh4YLhsUphdIpQ8%keki zUH!!V8=mM#HI3K?<4TpXL~d04(VyXdI$-l&C3M7GZ6I;pzN+|p{fB>ZHl3_^rq<5h z`adG}(*71`Zs9+9TOBy(J*&Y!J^Uh*;NQf}N5L3*$!QkWY z=bGrHCu5TlO3wB<^?3fwgq}|(rnJ2fK0wH-5ZlcxcrAcB3{|+T*eJ=)D$XNP3-coZ z&28xm<|aVvT-AskXjAxCTjARoWK{uAKGlTXqp46PlS(sfK2~yjQ*~bq#%)=W0;g}K zc3KC8uI{9cMz$>)^7!s6YhUp8n7UgCAYnEJ0Xa3#&W?u%W5O@&om*nV_jov}a_Q5= zo7p(^&3bOR;GZttt`H$|c~RD}biV>>z9;bomV;#hEi~Z>QIm}I{3~T7;$awCNczY2 zL-30DL*Q1EZ{_Wh^u65dcqN?w0EPv6F0rZFXj)6^T9v}V3VrEhYznF2yE`un=~kW_ zMA;Rcym3YpdMMB9UOoF${8O4uK==g0f`tDkT^IXfOEjX zHS$)!uAMbl24K8rX{~<%>z7(BnA9h1HObt{#~`o!L%ntVT}xxmmMUsTw)pe*uG8bx zr0}o8%VgE=zEZcLe{&vs=ljOE!{CpJ9wokQb6V3RS?=WA3BWx604mJTJ|=i4LPxe= z4?%0@9KF@7AjzL^YPNnO>31$JbpHSrUCzqcSm%RjU)HpxN-1gzqp7QAviu>}Z3-P` z@nDKZ$Y;3#DZ%Yi{3rOodGM-DV$$yF-%+;JLA5q6o8n~y8<-v|m({#hb-X3y#zg)V zEJb15X;WLyB(_bdl2_WcmnvE!9FmpKQE!NL8a)>z;e@?_Q(u zhxX6BzEtoB#+YN&^lPX-&YGN&kF?m3a*nO*oCDa3`GqICiccn1&S_w{cy37&wlmti 
z>h@^bjM8W51YZZWol43t5O`6==ACsPnFNS(Nc6>X8n%U~*|ROwB|tbkhAW-@fPZNl z%MDO`Ht{B?&u-*>gwG~IAMT9yud_AJ4)_Ph8fw~WcF}pPxnl|iV0&^p*PT+bjI7UI zGE=8?e31$S+!TFjNnPCbuIJ)+!HrYE%x`RM=D4yfcFq?heSIs=?=CkmR1NBTQ;d|h zGTUk*%bFpI}de)0#E}PJjSiHhI_4TUr+H4sIJa?;< zQ$W?smqN7ap5g!wKei5^8-dzp%J%~}zeGPoqs@@x`#+v*9afK>7S4A3e<<#?WSdLGG`lG;~wEqCY z{{S1s4YsXvnkJ;gE2Y$!f=)TZb~V&^i^d)w@c#gYu6$D+%3i~75n#4*i3|Dxiukq@ zrdpu6mIR%%osuqB70_t^02!OZ+E4a9zLJY>9I%Q9C;>fwwb1a8Zt0jq87W50==?{i z{9pK^cP6WH+NHg`&AQ&kDGX|fpO-2yGmhrCtwUDRw4HJMJ+ErACG;zqE#{5O= zH~8svQ)R8Z(WgJV7X**avnKeDWvICtm6A?RPd#^I@$356Wj7cl8yr<8=dJZRIQ~9( zze(`N{303_vo)oI5M=VyBfAcSk0-0lNHIAs(5-w0E!Kkj|7~a7~btNuapHzG@{kXL)PR)EP@q0**Skq)s zu}E9@_L6W6k$xRP$TjI2u8FVN*`E;jd%`-dx1&s~UUPpE9}Y(y7X<&tTJa z{TETb8ccU4E5jb|v0X&aO^;BIgVWPBZfGQpfn*?O0N2wVZ@>@Qo5I%KA-M4Mrlkg@ z2b*g>+#x50w&8~40EZpEZ@|}+d}RHd{vmuH)01EDnQswze3E6dl0o*%7=ZigK>gy6 zL(;igvpVA?BzfsY-JDe72!Q*Pwu@n{Xv4%_9-Q7;mfJJz!`n%Wl#zdg_2;3*9#I`t ze-*1F{KeMv8TDTi!w|N`w2>BakHBC*`qgK`@THB+wl3^s^S)7z-j!}umK&!}x8J(1=RXN45MC z@vgn%-6~x-Sh#~4ZMu&+7?xwxC+S|ZplVik)4VLFgZlJ&ME!D1;g{6O=H}u=7r~8s2Z$FQ2Vyf(B+}Hb zW@kZYw%#BXFC1*fFk)mJg|3}!VV*JO&&Pa@YNYUKdR%d8H)Mumus0ljHFv}B4!VX& zecGTF0vK|xT>QH?g8dy?GrBBYpr#319;P?}}8+Wgln z039>lmF>gNwF_;}T2gl@=#4EJC0lkezs$>n?MbQH#cu*z`MZj7och$-d~wEsK;Vw1 ztLj20l&Us*R9@!0(<)7DxhFZS>6}Aw&SND>=xb6y0nSZh>(l*;&gSu#D;zA@9YzIf z1*kob4EWXJDRsXaTt*8jTQ8KE9RTZGoOZF29l1W^tuCReHnXnXUrReOysSe%ah~-` z=T!dyOEB5K;GcUPGhImYD>u($jq!Z%Y2qC~JC#=pjGi%B@Wm``ydDVVw!R_hF<&fi za}X~!Fni=0p>GUk6;cjB8Og4Q&t#4&j?%eWLh1%rAdgy?PmsqamJbG&@m3gkIp>Oo z)FZefr)u51M9ph5T|#0mHnVj-b6MBkF|hE>%vR4BGRWP>Pp7|1yK`>x-N(V&IVQQ? 
zPS;Go@hY8010fHEJup41m0hx@Hjd-sSH&jrHO7;5d11N>jUjDbK;JUToCZF*6|3Py zcr*(}nZZ6@4;`~yFNod?x$OKkr@}~AMnl)Q$o#9)(@bcz)P;7hmk<~O7^!h>n^Ant zmh|A4SqUi5rfJufs|hckm~`g2o1JF=08YPI;hBaTC39Uilj3V{5`Up4_reIv!LEvEypml_UwWFnXjXMC;Vp7ykFu?JH@tM0Ml)Coi^1XZCct9 z5xXiduObhZcpPpWYX`)Cv>(Jj6NofzYUUf8c>ZXXOLRV2&UiS;u8(M}aw;!l!(-8I zQtB{~wn_V#`M>)8tJuC8d^Wn&W{sV2qZ^ctwb*!D#0%rgt8G8UJ`vNk3^4upv{x#8!Cu4e48a(b#3O0>mQ|!9{92|A7 zS5k$9%yLgWX1$ljdTbHeJ-nZMFUnXBwd39)V`9#-zaZ8rv!-)Vx=zN{rck_m9Z@>t*lVR6>kMQH+)(*B9XwbauX;PBKE`x=U-6 zknUFRTFTeZ&JvZiAlH$mlGt1+Xx?lv9sA;~9bR{Xbem(3J7Yo60ZhM9d3j|J=(8Q( z+*Mt6SR!?_w<^k`ITfuo`BjYP@d_9o`+O=%8 z2-X5*&m0WXn|%hJwl{_Kz_%v^ki257Xx=EE;a<+-Oq5gi`sT6hHAOcg%*NiCHPKmU zHn#`+N)Vu)3WcmL*{X^xX_LdLHtp&jIPlr_{{R|o$au(*Ctl>#39`y(i8ysUFmPa?Ty?#)%wHvAXjdyful*80pbzHO!wFp02R2c>7oLf25W~c`z z=}xzZMot0b^H=p4k}HCV&{b(NR2=>yyE(4R6l^UO9T_8`t6K@~EUE~>6vG-H;inQ$ zQ$WZxJ%Mf{knW3Opycya_rYkgOE3x|QpLMq43qj)<9Ip8;aum%O>Hy}0qZ)fr2g*0 zMO~+o%rF4_=_b-9do$)gh<+-&@o$MQHEWxue9Fq1IRQa91KSnDc!EY3dj>fiVz%UE zF?o)_bk2J8tbIt^$$BvS<2g0e2JCosU%t$%iEczgEO}xuM`}pmGA8$Ya&yHRjoF&# z$wlEZ20PVFO8xBNA5JT-GHlD;a?qCTV}=trm;eqs(=6@(0JJS6p55}v(-og3)RUa5fcCx$*50}T%sz-cayIDM- z+ZY@-L-nkM*De|WAJdvG^&c=D&z?TDf=L}RmEG=rU;7IF)HYrM@g4sF!%a^7#bv2N zvS|K)NN%@&@p$M6=kTwt{{Z1->iWKqJ%K-Jj%W&~MmS||8y<&keolCISbaamHkaWW zbQcRGdrl5P&3%9HclLt6)b6Y;^(|y1#h2!k3<0=|xA@mRdTu(Sri||$O^@1w#(#i* zGSdDmd?>V%FAn&De!;G2=I1K%CJQM}&5l9sTlP`uy5^T_ujsc6ac6yQvs=igpgmCf zFgzN&@vFm{f5s1ndez36c?#a8=)SXJ_+K#w9o2u^9jnT|33$%;Ll5A~z#+Df8R8Cw zys5_$D&w84!N+4tg(p{YYqoqTtJ$3a5cx4~lflNHw{Z z%Oc#j$Vmr;OjjKEYW z9OtE0iWHAPlutWiNBgI)3F%k#{WT!cTFTk}U5A1**R^KIi?g}tf3l{ns(5?B`pf9I z5=PgO$0J6gAnnM>{Oi;_FQ{MmbHjR$x+7R*l#}K$;Ryb9BV|4g_uR8T1({MGz;@*IrpzM_;I4@*Zy9qs0Fr$?HweExh{C;+pkLXTPrrUu#Ol> z0yGMPJ*$#ZXMGgYu<12KxVjo-JAB6kg2RoXw=^hh;++PfHw~Pk93QXJxjSKP9=Nf> z&ot>7+!b;Z9(vZsooRI5BbM_rK2=dmHW5?UdeUy^Qg>%Va}+X6?HXYT`2qB*(n1@9 z-?*y+7gNgpDa~mpgh{mZsZQqq02?bMyQ=XaIR5}_4P2TfCgeI@(An<| zjxY^oB!!6FPtvsIjovbWGl7yTHIS(&M?RIBwau)Fl!FOO^NuT@@pRifF{w)v;g-@u 
z8_?pm(S~wQ1Ga0M@uD(#S5#0=O`N#n{gP_wwuz|}k@B9osy3Bi^Q(OC$GIk}_;y&t z2_eaMIboWw;&*ASAXxd!{_r2to1zousxjbxmFPt#?0HkNY3M z%>~7`n+pP`xT{`0lg)`EI0u@|@cp{m_;Oi`=NAP){3~eFxy4bdYD1|`8CWUl$OO|g z+165H=ZdL%B{DmS3FoCY)?0xlV*?rIn$^K*WTkVZv$0tmNTZ^hliIj_RT?c++;97? zd0(w}T6?$lQ_U`MwYM=rpWaU|mZQ{Wt<#K_fh~={0Y!1Dc%*6!q{kQ_ zH@$k5)z6h`sK@UNjQuNz@U5`ZkrF%>2r|RJBD#HE=XRE*0m||Kt_l0HI%s>S!q-|a zv?J%`ehvjQMbu}}wHV$w)HZ(l;x7;rILp2|S7E1IL!-Ho$uY3O z9@S4#zq8Y9%0fV8A1c=wtazUGXDJ(;4|?LAOS962Een4ZHS1}fRgb>1?&c~{EalE*L2dB?;Cmd8#n3{AA)XWqAhy^bp96RWX~!+oAXn4mx1$40GR*=OJmli-aFJ|Jt5 zq}tA>9_X4t2)B>>tMBS-*{x$DMP?ljV_LasV=6dA{plYY{@NNnpNGCB+UR;Whho!g z?exU*V}=ljV`-vD&qjeZpqTZD&Ju?4JdamgLyKF9gj z!uR@B?wK@}mx@*4c1HwsAmIMA>CG5osq#+iV$PXx^2z(tgVZrS>YdJBQzu-tGs(D? z(pc@M*&`v?=a3CWVJz=#qVaJmj5)>uKDE~up~WrBFhtVuqqRyFOJSEBd(*8hBS!;h z#yaAX;f>pffg}@wilz&4dkGd-%C^yt_1kzeRf0{{lRkbzeuvh%eIreV9X>}9;Hu<% z(s)waQ?b3C{{T|C`AcRsQ;(M&E0Rvk-Um@*1edorS0|hx9QQe_{Z1#3=2LXyO6;nf!+JnuL?4)t^LG} zffRozKD9>Tad9DW>TpeGdowibbiNPqTjST8Bxz8m6fPb3^2L zZnI2PkY_uWu5n*6+skzg%I)EIes$S+GsM?+cknU8DJ>Jm!?7QD(s8_w%5|))eVyW) zh%PlXyRwC2x43~Mx5H<0k@vqEqY2%BL8yEcJL^9WG>g-|R*Kilc^xspJZ7y&8zTf@ zcr}%+Be6ThFlbng59v&3ij0c37bDV<;|C)ZU{hGwXD6xeN)YFh`Bbil91ezsi!=ck zj2<&yY5P@b$3ys4aRhkZdvcKsoN?b3>OM|arFhrvo26=(eg?3=wIthGU*_!RpkamM zzckk7RlkPMneXo|RvoK?0LCjDP}U){fv+wGaq_4KxvTo6%EfF>;olfFf#M5a?TsSl z&IZ{g-O9>7?ge)z?xVq|^hb%Mns(QW1pXCkERXr*R;;@bxWep^8>~o&D zJQ|XFb`AIO&#y{!-PWf&(S${k5PJHGhGfgM$j5*)$7*xCz=sSQBNZH;ci9}H9l;I= z?^^03r0|D|qC}>M(vx-Ku{kT>&-`I9@g&<(Z)t8nx)IEASpjgQpIt;7-cx& zFimo5+0gbn4L?cSEw-sNe<3@fIqO}Yg>~1m)D~uD^3?s%ai67d)>^YQjCRi^e5NIR zf#>n9OASGzV8!$4im5x72`IDa?*sf#hJO#ete2575n@)62p}E?2s~B)003HOl50yj zf+@+k+^+3{nH@m~y?puMe;R*bv&hT5ynA+&$6V&UBj6v1qPV}idE0DrOBTfdoE_Oc zIifM;*y>dskC%K!_C|^*l3-+waPB{d)-~iIt>cxB;2Q^;eayluhBHPKIsy|LHE!&OspByd|X*|-v;y=dCr!hyWR!wd|1)dh`FM4R%*k;h7gd4;;5 zWAiXMp{wWzQ{id!o4t0$ZzN!idmK}uNwAb+H(|SRn)P3b8t#o2n-#8#;S7=k91%Yd zqTqq=(Rr@~hT!>;$+(S-4RzChXli?P5hJ8ZR#*|vazRtK>490vTSItB>~!7*)wOGl 
zT5FgVSxjZ*mmK7Q)Yqc;Ps5jbq*LntCLUVB5olj;MU}}QMt2gMb#o#RCki{Y~fdMP&W59!&)sfs+IYX+k7_CAishO zI84&Nnp@12aR?wDJ-b(FuM5eFcy~!wH#Zmz%hD7amM6EMu1CW<-NC%mw670ZHN%Kr zHn>O(ap(KTJ%0-HI}JH3^yCf9#s+!jWAiy3e>&%C?DeTxM9R_h!ZlGG5$0$i0#*&2 zk5i5_T{fF(JhQEwDg>;+4A+#c&xv)5tE>1Xx=T@cxrM(h=nvgyI0GF0Yt%dqrCEPw z!+!{U*B>;2n>cRcKU#+DRV&?_tKr>RAjU#u034JVA3;%0rEJ0ya;N45b6q}%sov>! z!ELU9Kt@r3I5i%xXQ$l;y;L4-hcYM}X0cPWjFgx9VoI0a%I)9Pw?VW}h*%V9}@c3rHYYMc|Ol7XTb=KK07#7FX#hPqb%~_iLlnOtHXc zoTKglU=G!vwlfZQBImGNnuw;I#@2=-MSG04c<3|MvV3EwTFtwj?V~=|Juj5bbkKykZyjr)zsbjRhn|t0tEZJ9M&&ofC zpL+SP;t#_w7yLEXq|y9MqfHLQ43lllZ=Xi3>Dg*{{SsM{{YuDny!|GN-oEv_F$Q&$e?SM1b4_e8jINsy@A8OBW36kNKK*MV8!O#0UAbU6 zs<(?AmkdCzM=4zHijQJ#Q^a={6LR?kbDGMyzS@P~)|Dd@-E0Se>r{L~MX|tc7d(nD zc0PAw2gZ8UwS+A^%rpEeSbFw|JW=q?^_#G5~IZs(%AFrYa{4ChZ-C<9}l!EE!kK!r~>Wia)9Qx ze#;X4+j>@i!wFLJ;g^S}i=$|k-46tW!L65x*boQgYbiDNs(aMdEM8?f>P1tQ8Cr5fwlh~W zoCe@@k4{>gg>I97;;KZSYc zgRYhSC9aH1X-F2yxnPTugpFuB5k*13(_x)GzBOt!d28&+O1SuoEbB}wmF{xpyeCL|chB-c3` z1e0fRS+?#wBIGVwKma@*YgbO6%ez3Xs#x+{wPjnfhXs9ku9LxfeZAhj1owzcF}k#X z9B>acqPrz&E1!0JBJp3tNc;ui{{V+xDO+1DWi(`v$hRPir-)b zu<}Rt$#m_f<(YtyNcsx+Q^cMrS-e*Ur{P^v;zYAukX%plxWV9Oo>!)8=`Vu6wg#_v z;ro3L#*s!fI2jRd8Ax072b6Aja5^6Kgy~NAJxWzz(b)L!;b(|^7jlbpZY6=&baybE zs{_c-rxnixCNq`f4t=Z9EIu{(BjPOA7S6_oqijt(0i#`?o zqP`Pr9|(1*tnQ#`Y;Ud|blbJZ&a2KDM*XX~BJ|Fmd z$GTC{JYA(NA~eSv4r{GZ&98O0MwBY_mqimTmNv7T1t6ypJxoA@PnjxNrfGmGH zd1dngpGxkENgjNgQXyQ(zcX|=#ZBb}vyxN%5f;bNqhwgy%tPCnk{gKRx0VT4i6UhD z>uE#X5_dWHFE4dF^tV(1$_E+yi|bv^kXGIy99$?k6~O2&$6a|q!bkEhJ-DvI!sUh) zNavGWt>_W>i`SPd+mv!~$Guw6VYru1^KK);vH3~#rf7Op`i7>DvnUVmGCO-#?z%1P zWQNiRwt|_!`qmPSwmL6oVx^=Gp|-br(9?vjz!o45x$jhM+0lzBIX=~MPnjce=8j3{ z+M+Goq??Jc5;-6SKT}?<@MOiK+FHvfA?DQLQ`eO(it^n%65K|he-N)%_*-Kxr{kXt z_*Mk@B)JziG7RBkOt(1q#cQ$A4KzMW)dbBbapp7mp|HSkgmGDt%%*mHk%PyrQP*`# zyV92d*#a)m0qxq78S?{89Kh`%7!%jty}KSg*}ZQveUHy$fCp3Bv*o>Ou$K;n6~6B7 za!IX;!!T51=5C9<~B z?WT?kdsWK=*J&VrX1Gf>V{{QE+nfTxbr`PqL|ZF+XfO2mS9}*Sq^QT1z{un9>)NrD 
zbSe@(9?E?gyeQ{fy1I|--cm;r$W%IxK|hab-SABI_gW>+r(q-7*v|;Jl1Rq+n=SLU zPu;E#4-9L*C%3oK?{3x@uC78_g?;R$k1Rm;>FHhn0E2WkwwB+*_v#Ih3w>ive@Z6BMsYoai4nK@$RE7j+XXzHwVs}Y!L)+ zn=RJ3PlsM2)+hL#{{RRcpJnAj_9!lH8wrBgG^};r=*E}l2a3+RGKLsldm8BH(AwEIZ^-tnd&_1*M&HJ?K({e(1mi2SC$|Q( z&9<#M%xFN}is+=ZED3eb9qI`S!7?y~atJ#`THG3TF|KX(OL+ePdP|Ib)6HaSH+MF+ zQp{h~-W)!%~e zYz@rwNhC2`!i1@HY@-4E>x}Tn#lIE&KGsZfq?1n?v<&P^GURmpJ9<~t5qtyqYim5$ zHX3WiB+|5jV{9x;MhQ{)nr4UlJbWwB?pj#1Hj+k7yQPzM-&4s>LaNkH=Fg~W5- z&@WrhLLCqpy{@%_Qv1ImM4$p!BQB2d-K=YyqDvb!M#EL zA6rM^KM~zu{{UjhOg8dmXE7cMZwKbARFjs6TaPR(&zC+S{2K8GfEY~sxb0=?<^~b2M71SbVwH7lB zFl-(>*L+ptd6hMK4T(R5j;Fm%1VJPujsfE$odT(0lfbFrD97c&9+jh=&VFk%m-vM+ z%vP(@sTF|@oQg!Sr_K)q_O6S^K1tPpb;i{L9=ufu9tB`P0~Ng*zirfq{Q30-~emT{6l`Q&86tEOhQL%<}l9ziRQTv1llT?8)(9iKU&hdEqUT= zrt=B%7Ei@Bg;GPon>*HV;}u`>Pth+a%x(iZ2rsoq<$gz2jR^|Nw>OMJS9;%;e=a zq175USnx*f2nXJ|{{RatQT$1`x(AKUHx8B1_*=vJpNe%Ck59IF1Q4{c`F&MXWSn|e zA*Xrb{qCS|nc2DgO=nrFSWfpn7URSb7+TiaUBK}5r{H*4%J&EOM;+^t(XHdyT3K>i zsKs`k4S3)$3Y?Z)ujgE@DCmPL_|2M2musfDA>{Gk^CDLs~CT@$EwL|aE;p|aDUdieI{K}=fQV^DVAtm zL3tH+$!{X^p}XMrBDil0_=;^qMY*$)h8p5FZaOli)A`q5@ZVL_-$%TMQGl%0_&$35 z){i|HcQw7$^$5Ey4SxarQt=;zZr4$k<&mUNBmm?V>4RS5;Gc*S_=48X5ipimXDu*q zl=*MepKw6>*UWR>MQa3pr*$?6}-26JaPjlys>|$n0w6MTi8>-fA|FFO9#YY8yFr zj6^;CsUor?C8ELO8Kt?NsLoh{Tpl>COXD}e%@^YT0Dy;B~IPbgpxx?s9R$Ej7EMk?m74A$<>j*0yY}Rooy1liwAu@vFgFmxBH$ z>iQ<3XXMJaMqe#g18e-ijCLUAspxhQlv!FnM}K$X!>A=Kma^sa4Vs-wT?AX5M&C$&r_l_-HRGUv|nyOvE(a)z;^)G zwSLH1@gKzPUtPJ0UL7*S%eUM!hLG*YK7dz}>9Ij)Z8hUZlF=|3>~UX2e#0IThReVj zmx*o{GT-VLA1NOyk6)=ZQBrF6I%&tB@hPyF!_sj z!7edaz82R-{{W6O&k%T&$hvNi1bZ1!WDS`a=axNcKN|S_PoUXp`ene5X|7n`xrcX= z9!ba@^37gtNOH5#q|--z;r{><>HZMXZ+ug4XZDMoR%8)L3xg%30AW}C_T>5j&3a#e z{5b`?81&Q^7Z(IcBFDlbFv)YD;TgxduN?Se;hXJ#YftTH#igvW#Ra4`=uFpSbJO@# z0g`L#9|3q7EG|;g!p-GnIk}Z(;c{{TJ&@@&uFwZ4W)bV(9L`-wp$Xh&<+PP&k&r&q3jQb~!!cbv>WK+Qo zIK^0I(sq|?ERW(v?eA9yhZ`!Z8?1IaWMm z;MPxzJW1e>4Ozt6Hp+@7!UrNUCo#wTx>pISzsOpUnv%Ou2@NJW6T;n6E z*Ejo1cqdx%55l|U{?q$I#5a<$fFn@A?$2D0YEXo^v^nQhTVtO1;qaTm`sLgnGST6a 
zFaXkA*h&f8lYroG%ahi=Pw}OzuD5eMvI5?70kJR%2lTH|@L$F+5cnTNzxZ}9t~E=$ zsDf$g$(@6#Qcr60KNBCc$0g*`I?7}|WDWc+o^kD7l{m?>$gefcp@yv6Momkm1bej- zWaR!;5oQB{R&=P#-XI>e*yD6^b6n+qC0?>?p%m~4&16NX1v__R+O<9 zZsORr*id*Px#vw2%+s->b$jQ=6dan?@Wq6)0NZ~m$FkGewzCTDJR4{f1ljb>V`!sz zsTHp6|E20{+&Z(3^esAHh&~vyzkofyrKV2|QZnac z>+>l5OMaF1TjHIS!*4LoA@#UQ2xxEwvC{R1|T|8`-u*-0xm5I5}d{oqmiY}UW zGH-D$!;(1ONc`#cmg?U(Q|UxD%W}SE8T8-$vCytEyPF$( z3c~oI4gQ<1TwL7R%K&Ro*8XDSau4DM(zi5An}4_6T<`tWv}zT-Se|Q`8oXXT)1~kx zt*FHtwcEavdpx*gEPiNM{{V5jdKy&QQMrwI^fdf6JeqEXq2Din?St1m*Gr)IiEQ+{ z$RP{5bR03`isSAky3u^iFIKl`XNc^QHoz;6DgGSQ32z@LJ0M09p8xq-nPDGI_VDmrp=zme#KPy(+>8vRNd_yU62-Y>2{Gbk%Evz;+v!vIy-g+s^ z$GaJ;59Ua@9ORyYku4>E`DRbyN~F>~D8|~d#--t}4{O%HZ1BFMZD+<_!sL(qb-UL; zai;1vQh7HvtQ#5RcJ-~;>}3iFjDioPK5JR-NAL337#ph?Rkn;dQ#q+30eC-AQEGZ) zUTKorrPDxWO`c?bc``cIo&J+GyNP^~BV&Nx{*_*JFe;YhlYlEvY=_FWIQ?(o#*Tg% z-TXYau@c)$r%3@)xXTqkKi53h=6{X+J$2!K65nVykw}u+UP%iqv6OBPQV&Y~Wwo-p zxRgv`AS(lmXCpQF?f(D;5Ad`2Yxa=TRF-k(HKH09> zJx>!}hehJA9C)+hR<=Av;~U>T`JsvykS5%uYqft)^}^SGZ0XRV?vuWFtqVeTf>e>7^_g#QL|`ipD_2JmUC0NSf~rY5u4w#L zI;R_=#Pi7`#b+FY4sgm3TKjL{Uxu5)-w1SFDeS}&8*ef2d1Azzn)r{w8f@Mo@x`W_ z2n;2>NWeV^EHhuCx=xL29+Pis)1gQqSkaW^5C9w+^eL$F>U7YGQPliP7ExVB_g3ry zLI&%!X(JXD5gblqdQY0HAc$* z-DCSA>^CyunZf@6RGkfJn~Pm8%1fnT=StglcgL+mS2v2jhpp=#EYPntjcZ8LE>?XZ zTr3w8$+guHakOWtBz_f*;QcdF@vgiilgp0I&fEPZPZ?0$BIA}~Yl-ljgn8Pe@d>RX zg}n7t-Q|x$+70A-&9c<17E z{2ns#&xroX7uj~%SX*1F{_Ox>nE88Wj%&4t!CGgCzA#_v8b+*c?R1D%+z&irIa!G- z$6lHG8u05*NuNcAQE6D|^sg0oj6&9;?Zj-j36H{-D?Lh5(-7^G*KKqq zjN>hiCg((zBPK;{*y~h(urIEWjkT-DRAquMPkQuCN5euzV2#vs#Zl92bqyBmPj1dI zIT+xNN*tAnx;)EP(q|JacM+8!<7-n>M3PbzFvcXU$^Ota$5qO8sUomdruuam7y;g>EEq6tU%pQI3`DGWdSa%2?yr zLXVdV&2yT@oAxWs<+Gl66{?t;I17)5nv9bOV}(ZX)sITd)O;0nY{uG1pCP$u`Vn5e zpjq1AXUqh4$*kWLX*ynmDh+2@x3^iDjB7lhO98>gdeV(XnBA;=f$_WH2EXCGe0)jJ zJXJNCDc)9RBN73Ut?HQRSh{b<#I(3wBUaMlxCcyY)Y|xFV!$@j{7bZxUhn%#OYrwx z@U6dwE3YgWf`7UDx-qXz46CS(RI7wulzsK(@T!t<7DzJ*=L#CkaBugofrE| z4jg5Kgf~5XYL))8uD+CXyU~@FRhVzcBONK)MZ}ZFOs5Q3makedNgg!nxlC(XsVDhj 
zr_tad@kto@seHZMjMe=z%qjC*w@Q-EGsl+V?^X^M=D_B;*);hVR8_|quGhqNTeWPJ z^aiTOqC+&i#gYz1b4phfRoR&xnT)8}+aOm_;Q*pc6a*5WRa2xosAC!Fj+Lk3B=T-X z;4lw*$w@P!DHe4-Cgf|uN?ThcT=GfIh2KnVkqYPX3R&Iq@}GwL(a zv@TpIAU8G5_=;BYm=pD`IZ3v92`HTJh&5DOW{eT-UU%XPV|6hgbvey-8r{XE^6$cVD5cJwDx-y~>_;^fgZV zNRoS%yCaq9#dXq)-18@_k6HbOd?~2-llFkJ@b-zV{{UxQJ*4cHTQ?E0K_edhEA&&t zpAU484cot!rv1NAl!l&rpWZQJ*nf0!+ZFlk`vvP77ll7*y#rYB75m!TG;_exs|8dp zycW-HEA-pL{{RucXWbm^K0LObbQ`W`j{RIB{{VR7?ytRZVtJ!Ik7EaKXStQ5=$G0# zd1inYnB|jhHr1<(2ye5PLN}-wsD2#&)1L(XBv@*Cq>rcDDCrEVo~;nJuv6?GB{; zyN^NF9M;v(#&6l`(%4UPuIZ7+#1Aq{#RDgx&TGz_ntuyBmnU>&FNf`XuPit|_4B9v z6&Jz*{{UqB=`8LYZc|W-P=QX?ja%l)?O#f2AGEjZ+oQ@M*L-iH*)k88ebDLdMR||M zFWQgxOZcPkQ^md((>!sc>M16jaSS|z<zqix%J9{}x zXUN@!J@HspXGPRi(QiSNNP#9HdG2d-SiIA8?+Z^UoO%1eh!1-53*AG+7rKyJE7y$) z;|Cb6{VqL9cw>U@;pFL%?~qM%q5DlPZfaL|I(LF*XkYiT#zT}nbIIvk1Xj{Mp$?-f z{DhtBkbD0Chx4y$@J5fOc*jcd55q?}zMo1r8bCz(npno-p!?$(uMN4qW}6{O=kEqw zVY~75tszWfA~&RXxfIV>3?dA9hKCl2_$2rY~9blT8~4IQtkX<84i(8eXM4s;gu%a(L}pC?O?nPfNGGxW0-K zzvmNFxp zoM6|kf59+kx$xiZaiyJY9ysspE?^TJfg#zQ-uzTmZ7mM`ER(8{`a5UvE5=J69jsV( zWx5~Atf#?m6hb~@YxrCPT9=;|JQb$vhr(~-7*_ou5B6Ct+W@% zZ#)j)-OdRgL)N_6;wKxXkA2ckCVXE9?}L8#RJBpud4r0Wd?d9~n~iM;JtG;dY5a5H z>l=TxT|){3$@u{%^{n~+EqFW0Nk*d_8>2F!OKQkjM}9MbT;ATKo``OhG>Y1n!D}5t z%1cqJ;fN*z2S)5obAB55Or9^gmhV#3W42pU_Hc-tG>gL;($T+YZw*g9%>F61w$iK? 
z;bXAXhN6OHBtuZp$%PZC~uli}XArRv&btDDUVH(dFuK4*$d{o)5h z-nM-GOyw$TV@^MTx-3jL_=-hQ_d+1S75U%)00ou!bq9hzE%;whwYzE3{{TUC^BHl1 zl48F0@MrBw@Uz9bTum2=t*tI$k>Z`Pl0s23ZW{S-r9=Q^Kgm;Heq;`24E1)xX*2a=5`gYQ}>gj(x*9 z1m}*mKW9Kmrb*(d87Ea&9Xe*C(M+zOCNfSGA6omf_5%HqKW9&mKLs_b?~5?nPpVvM zWZJ2_@~&(l3xOgwayK0I#}(@z1ApM0-?E><4~P%q9~}73*H@eETlBB(Tc0`@<0O6L zRod)24^Fk^;(59BJy=nfB9cEdF1$~t>65jE^vfN}51tknBCKhiDvn?qPb>?6tRuMg zs^7Ix)4W6bR(SVOvW77U@jcQ&(5@6lu2nz-wgpk}wW7fe?f4k`-7#5FP0r?3Cp3ER z?Db`*-FWia_WJ&4+6Olg#|XnXQaI0JlgGX*^?OC}&w-OpvKlYKi4|JXHiyn_kBx&Q z?&;6JHTfs-%Hs3Gy0?e*Qwv9Laj67p`iF>Ta0BwMwftlISbQ|`2Bo2B{s{Q8$#pHc z8okZMtH&LSFAjw5Y=PVP)|k1}jgmU>lTH_BAQpGpRn6R2PRa~I z%0LPps-?X>YwV+4bv!3U^f;un)b5qq*G@>IfU4UeT*$-fKhHJNY2FoUPcF&UK!gcM z3!SnzCbjJR4JM1F#eJ#YO9KEHl#g)(z7Kloj)9@v>Bm%uT$18qq{@(|%!$|ld-bVx zBFOeqxyfnX9uZtmv&mV?<})W!qp4xr6-l9=Rn#VHc|*nqLuaqy+Pm0d*Y&Y;q4-b1 zw)%V$TKNWVEK5At86e8XmOj;mHj!)LeF8g+$l^<@uso!4Gq=AAXy&=2rqP#bliOKK z1*WMaB0;iZr0r$yr`Ek|_H+36q4G|{yKR6WstYHJq2X4+Ew>L2h`?|}Xs z&~7yyKf}7#xp=64&15XT%vwxwx!ZuFzB#X${vrO~KeP|+58}7bykGGpyba-tld-Mz z3qLWEGsf1(%_C!sf-A`9(b{>gf^&by?4!b#$NZfgfv^{K1`Osr4(prrhOj;-Fh?Mwa&sp0ECh-=~< z2f{j>nso8)y|ue#LWi+HI^*8HJdZ?#tIAP^C-=#AlwwSDk!L{{VufctX!tfqonK zX6E|I8~rJ+S7SKO@Pl6y+Ua)o%jU-kDthJlH5J=f#Jl2)Y4!E27H3zn_7{sk;H_qiU_8lAk3M2bM z_;yir`o)Fj;1!nU(}I0?>0D37&)b{h@5TQB4@u$w02XSxeAerC=UX&Zdt|n8z%j7o zaly}8^UX#GMCR4*RZckPApH$K!^3dQM%fXG=jJ>brI+DeswRJ0#6Pro?7k=Qoz3#y z++5wXk&;qZl(Q-JJ*%McZ^h4vJ~{BUpN0Hgtler5ST5Toxj^1Z=dtUW=_l|@-2l!& zF~4P_xwnhl$SO-;hbbt;I^DkR`-s6cYX?R;q>Nmuz1iimL#o{{lIP8G6e^Y-g->N= zcNj7>L=5sxdbOv+RcWz%tXYNahaA*CCGdusrs+2k!+#8p6Do<`o1&7@f~PgP=kXyu zeJfVTuD}Os$Sh-#l($1#8a>LJ*wwiO#zs2V4Nc7%pW9_tH`(BZ9Jk(6Gk6c#~VW%Wl2YBQTYo4SmJ@h2HvI`ghV>I0!$(HLQp*-o1Jq$kO>9s#DY)LOe-m$z%d>Rw8 z1XFd3lz~j{`8eQLJ%6mb$GPMibJI0~PUmErFg!s;0LpbkQSO>B6+Qm7;u z`@L(Kw;yVlMq963*FsvJbluLM!&*$Y`di$6rBf&F9;e>5ZG0sY*c+SeRS*z%F5W?= zcxKWlEfFNfNEoYnrMohKjexzYvJtdSNh`8~Y1Wo%T1%nV83&q*d(9>n-wyrc6)vnt z=jtiaTQq>Hw_cTV;Cl}^=)TU(;V!!7#%t5TS|=^ 
zY=o*#+nhDsv&DBLZ;(~WkOy(aSq~^IL8?aI3j;02! z;?EIy5619#x5LtbJ(TLm8@^{x}_#9zj&}91{W3rhmOS^X@#w$WXYCfp_n`1SYNS@N!nq61 zJ!@M*XA>Yp)Q{y_#zewJuMX>HPP|PY#1rM?$ zMX5>}qt>+l0NWS#gSTm0R`{Vj;JL%kZjHux=NYX#AKTC4riMbz;wwv)Sp!7Z79)7a zxaS_#;(j2vOAB!na}Dj&Z*J9-Z3WD+;^OI~{^@R_wDuHawML)X7^JM3?*1z*h^v!tW`f!kN4t;6% zIy)Q*9lh{PYex-2GQEW?P9jRqM^i6}JgH^56JE1U+oq7+FfMbPk)C;|kBhJEqbE$& zuI-7=e2ce?_4YMN!%Bex44EI^G`E^Hq^i;?5OKf;yCqJKF39I~D&OKOTln+D_Sb89 z7mBXDM$>Lzo52UA(Qqz$zsd!%2r zXT+Ro5otQLhK{;i2^EC)wvGOdpkVGBu;BLuSEKwo{jz*z;TYzFRPi;1#GY@Sx6#|= z+>_LE_YZ7W%in6Y+>^s5P^cr0l-r$6Cv{UV0P%{MVWBm4ZS~p3SrPng@db~H{y6xT z#TsN@R9cny+QfO_Gl9Dv*#!PH%?>%900CppFb8Vo^?y1)7+gB?lE9lzdg8l(13~A% zl*2hfN8wpQicF4n+32w@pJm~_JWa1eVzZH5ZA7Caw*-OM`qr+gcWbHmet)rQklI^Y zOtUbG?MvJ+;A8u~nD1Uis7{hzyW@=DQbjaTa-j9>NUp!B^Q*A>)S#?r&8p}Z(f{hvA=xa2p7+^Ti}TzExqv0(q|2Mp0MhIvgKFHa`Mg z%Bdsxv3|%4eWiTh&e7wb1miog`PDmL5BT3tW}0~RG_jVoaQJDE9xyAS(Y!z6JJydx z(tIN&yr{uu7ncAcp6$7?FZ60jHT7e0ogEcMMt*fR%jIoH>r!F z-@|dI+gayo2RtX^JHe=0#dMP*;Ug1`qML7_>lYf^4NFVB zx4;p_X)U`eND1m2paQ%900Jhh;eAievbT!%(Ig3QsKJ0!)k*1}>sh=+={v1a)f_e5 znmo4W_Hglzsp16F{5@bA5Z^nqXO7v&HQMQ)u}_cYkZJJE6msK!_@Z~m`#mw5`j^7q z0=n^5wWsP|2tF#OhXe{5>cTrwJ;I(oS9ai+;5pX~~h(kkrN`#zsF-p9*+6a9-mCE4ncU-+BA5XfMZ$R@qHX!j`Y zR2(@!-K+Osvk#6uTdNzd2+60wt-H*yL}l{!_1YWxS83rN+QY&+thU}7@jt|m5$X1^ zTzRKbyV9bW-O1oK58V~%pAS9=c#FlpE0@GNezW$CBKZuh%8rgrD$GF9>V8o}b`P5zVV6e-ScF0J?qi_9{9WVz0&nt z4-5QG_*HfG8@CZ%U)s$3qW=JEZ$thSrA*5c8fmLB!v#y8(oFKme_~G)-(IbqwykIz z%l-1m{3$-CrfV}${e*l+b9&K7;w?30gcsUbv4O^Vb6;g%_&-_j^bfn zFSWZcQ_z~nx6*uBsYL?ncDrLTe)w8lh@4}lK;%~o&G9cpk=^~Dg+c^=+jONUIKV8W)8_80i@&e!sIV#+9j;SyW2r2YoJy4G~RA8U*)^p%811a7J( zo!$2OIOtEfuL<~!pBbW-<3zu-)NR6%C9TYQ)TJFk=WY*AV^<8%8Q4!R!ZoqrUN8Nb ze`>8V`fW8n8w*N)9Hn$N*6i(VdpzNz4E5y=(;%RQOQar=>+f$l5U zN5b!jn%{?Y4OZ*M7pwM0^A-v3q+;Op+s$#aXqu|W=LNpTuQ-*;A;J^#9tIjt7&&BDU1^FhRt;!7WBPO{t>-3 z<<*SxlFJ(0K3D0QIc?^1rO(yug z%0@AhAn8^7M<(4?RVl{msbhA_bejPfVUB5Usm|I?$E?`4{UL)Ms5c7Zr-nt;Sx_8q z?OhgvZ{^bja-(zuC%+X8dMGz;(hOG}OWm6!Rai%o3G}Di$B&;qDf*d$*!fJ{b~))# 
z*~uJ?s562<&0)$eqWHEWaiWG{&Nw2uh^*pXi;u#)uMt=T@aLI~3gU7%6{k3~Yi$oh z_(@^~u(>VuuSdDEShV4FY@m=iAXfwMR?=(TF5!?9E+hf}0D`$)LRozA8nMoLR=S$a3Eb+@zUP9Zv$8?#t&K#6!HWF2`VRkd@TS8VKb+jRRZ zWm$Svipffyf3-WwAg%*%^R9IrV(m7A#8iQ+Ly z$31JHxYFfez&JjY1+OrsNE4oB9y}0NXkznj+Iwj(vtEKBzA3%c_Oy7)PQ0p z#xu@p*0#1V>C-xpNQtreR?RzHhZ{4)wH-0vQk9toJi-C?6{Djn*xbUc(6&uz%WECP zh05TT$*Ome2q#Q%E6uAb$T?act40#=5I$Bp6-sF$Qbt8-Y6U0JBe-Vy)yV12D*|Sg zNe;u)txn|ivAyAYS8J#x$mRN1TY4R(0auRa9V?met72HTpPM{aOt4(!EQ|AZIIEOZ zjbh9lXG@Rm3_@XtJX7?gjwO+HXAqDFdb6$qv*Wn{5sI&+Yy#tX%(lo|&jK1Q%*%gyFa!E^BD$HDpqYQM-Xd2^Rye1kygb ze~v()`l+irrmqANgl#dxj;se?!l<7b*dLL!g8u;TElR!2r|}urdYkVp74hr+-YXZy zckeZUib>V++*er#iekA&N%Tu%pTa!8aa`|?CDZjgaMspK1Lu`R;MEZJIlVc_iI}ed z8qw3P2i+_xot>k_5ynXLr`rOk@@F1^(wuJ76eiLaQn`v-g;j2bmi@$fGsvd|4oy^e zWO;*vc_f}H?xASYczQwVIUxOLoHR^)&Q9fz49NPngeHlLv`1@&9S3Tg><=4m+O-=z+t@T=2F^Jd zJc{pp0}&2g4=WQl>0DN&XDaLS2_k67U+#bK(6|8yfq0ctCnfeK7;IDwb zBGauBJwr;0@fe_Ga8E{B`^T>YLr2l0Ev5#0}qm%28Vu L7?;XfI`1Ul zoc9%*@W1wL@F#)w^QdS)6>qgm>ttxmT7`ofRvn1xxO>-Ws-J5G$(~`#F|fqyk;%NHd{d^mH~MClw;JNPipA)^`#3V8PccE zdmfphS&tJ#aiI+nbvykz%XzRtgkdzRxURm*b8D(bViC&3 zge*@59~?OKqByxj;Ri)abH!}`0N|8g1TQY5xbX*%JW#u%S66~iVF%NUlV3vkFX2YJ z@bkfzegpV(cd!A8E>Awfx4&nH-qqK^r zLKNN7Ig2ladgh*%_wabu(%u;Zwf(GCr}vVuz?D0p9dX{gU*fmy#rt#kk5ZoJ!T$gr zqrKBb!8&cU@oZrpI+DMYbU(G{h`dQR!?<-{hF&yywF?GoxeNh>s)0ELJwZ9-SIwUa zziqFN_fO>bhvEjH*Dh`qBoJaf(ekV@>Nq3PqMb^r9LZf8PAW+3ykGF^{t74Hwzu%7 z*kIN~b^*qxY>wgvC!_xWe2iz2SS|ZS{?Hx)5!-xL@D=8Y(>IbeNF78&BL%=6YUBYI7>WM%YK31QwE3wZc*Eg#}Y?s7G86zi*_2#^z z;jE|9?}PT_3-fh-lvy|pET}{Quj@;g4gWP!#YZbg#k*Re9~x_ zE2m10aTqa&=aG|Jej@OWpw{T?kgd^^gIv}AhpSlKM9^AIC|3R9Ve;erYqCD-xyvZo z9aOrkSI)CpN|F^EqxDtvtsM_i(r@JZ2B&1}A#5zz;18{H_BZhPK)#w{WFIqZ{AEjfqh2jRW#O0R9HD=M6nVtRX4$ka}njeBq&Fp=SdHxI>T;-2Q4qrf~%r84TWp$9oR%~7z? 
z@o+M{gX>+7k79#S@jUmIsf3ZUS=RPe+td&gHl;pO!#qN=PjM@;InSkL+E_^~%w#WG>a}eYd9q;$1o2R5 zI$EXNLB|2`c%}}`nVN>2Ah1y*FlUn-dSa54o>;q7YsMR{ShVk6}9jw{iv{5Ka83xUu3H2a+u;*a-i z7-ywu)sWP5K75wW>K4S7&Z9j@tj${cJ?p@pymL#0_%~|kHoR_m* z!Luc;oGA6;xRUpcuDMyArAThH)e{}V9X&Bw-Y2wxMy#j?7daKsd_2?5#EfqIMw zJH0DQR*d;5fiM86d^c=@b(3#9g>cW6-B==(?1^NtXzvD7lU?I@T6fU&%bRN_Wts*u zNWP%-u4?kgwxFyB1wqYwmw_}ZJI@B`_ZK>h@ko~YS86F3JoGg1(Tb7QqjSo~hpy4K zn>NSNqP_6l%&;6;wg*pI_4)ii;aH|IUtHfg$1Jdc{HbnyBk=gx8@G>8+D}kOHR#1^ zWhgdyllV7Kcnq=INt|UtUbPeWM_HIE$z_4q`d6&WZ{b}sMmAa$vW_;JiH<)i!|^_= z3?Z9NvZ&d8^zy8 zH(JKzO{x(5$8BpzV{>%I=GQ@mW02=4Gl5D8O^MM-9%SACGE8GPkj1xx3sxtM{3Q;F zWQ5&EDSy0B{oZgZ+GeuRBf?&2k(mcpWi`xvO3}2fU&C)}rrIP@EP;=fhvxRpYU$Zr z#!zV^`VWo)ZaPe9+a(vOAIl-+hR?6A5MSOXFgbt>)VeriNlW$lE4e8I}Te_Ek zbtwEA_lam)GV9LFmE-(rHYgrEZOxVNm3bS1&3fjQHixL_tEf#5`J;pkXCK3kYtJ=J z7HH;WFtNeois*hFT-_O7!Pjz>IM2}5Ry4G^ZDXFfhA8#Ri4c5-;kIG3#(ith{utYh zMrF5C)*>4tv91R03z@ZfpUF_tOwLSefyvKL#=Rrpg`CaeI4v$gM3*sd$3jO3^QwZh zZp(shRzq*KXe^bV{t?~8H#~58q)k#A#q*=^!i@g_u+`{V*TTz4ls&`~LntIjoAb1S zYbL`&yH#lPZCf!0{{WV#+5Ic75mSxOs?&ErP;FZ0Rj_Sy!1vlov$+CAhW+sx$gQ~= zDP!eWa-(WZk=mwm1L*J@O5`>8x& z@#t7}#%tv0slnf>KH_so^Zg4)@dtwEpTbrN;j8%Mzi6V>tajr7ovAc0k;v|~)_ghr zmpnb8YgZo;JY}Zo9$fa)GZ?PS)@O zB?f`0U%fBj~aY9)HE49KDq)fou=Av`*pEs!m&K0YJ{z6gP2)@$a-U`2Wj*VR0 zpAtR=_!GuDcZj?<@k7Ni+-WQxORG46Oa6b=+#$e_U}HG0M+&pOp1q4wk8|w57WlhM zi7dQ7t^6{y)?;`Y<4$sl=W>pU{ZWr@ab8v7+kXIjL)G`@ zDms--!{$(VQl}%ec~rl*7lyU%R?^LW9Nfd92>j(X+Ka5qG3o}>_euU5lS2KjybpKd z<8foE>b9||-!0wK9k~O9@6hl`>q<20rLN_<#z&^@ z)O4MDRnqkdq0DxvrYf`NZ{EoOmh{KHa9%CeXH1?fBMosp2Wh-3?r!V;|s` z#EU3CAS2!D(%6DyRS}(_w;=64hPlt%bHrXC*8F3oX!kO_Hg6HSir91$eaB#b+QO4w zeX7naHRnq$N9?+Vl>06;dzlK{BZJcit_^lttZnft!}9ot#8#SwGwI8fxt`l>Lp+5A zvDX|CUZxxOHPqy^Jo!0J4k+_M1khR)Q?%{m`c`9II-jyhDJ(h3>TA_J9q@`z9C&`` zR=3w6wpe2fkjy`MGai8lah|nbLH(HgZLjL-qKycyqU7ae9#~}SoRjIzeGh)u}<(E*}NWe4UyA0J+dm_Z*(&G-Y5OCwAcE1t+7HR$))Qy}Itfm8vLP#!FPa(67 zryUPpTILhNUKWaIq_o#O#Fj=0=8Q8iJd9LEsx?9untv?O; 
zrf&vlQFw1!urF~ugvURccOdllu2|KSXP~7{Q&vdWyzx%6b!l^_>#nG=$@{JfvU-4f z)Np8*QlITBOV+uzWdtHQ<37HXdU5em!$=+=)O6`rN0ed>L9BN|4`n2Z={_D_>H4;l z7`L^xj^yz#+0y~N!1S)#GlS8yFRRTQt#K2 z@)$5SDIjCowl!;))2`U*_IAr8T*)QI6c1|Y{12qv_`OK+B$|D-vbyF-t+zW9+dlQz zI+WnFDN>xBtZ}yxO|d@58_Gf8MV@NRP_+7Vk!uaN<@sZ7qx@^I@gIX^@Yb;^{qcmmg7*=;kjpBuQVgyueEdiVjT^ohLjek1>+4pUt5@3 zDcie@R#?*JU*R>eD>Tkzk%?o})Hbt0sQHa=A^!kJJ7nUqbtNO3blJ1u3#~@h{VlZH zmyM)Tf$l1Xzb*CVtt__=OomgE26)F@n!DgH7mo~Uji^}pW@w;wVCpaxhSGVcHLRtJ z>S@kT?)MG}^*>6vO%5=79H)ysJ##2G(8sysXePDbX%KfR`v^L+7I8y2D*D~ zH^lxrY2ngz3(Z0;I6cg;X|Oc1#R>tLT@-!6oRWAI*LVxSu=v8#KMaT2dp8!bVD5e0 z_V%n5bxqlmDieC62gQ1j)O<0eJV2lHRaW%K70KB~%RiLEEuJ~)UYD(WE7m+Iu1VmH zcufOCADV5P0V@D`!k(OSp7o0lgnT!vY7ZWde>@5@1lpv2P%JvA^*+>`;d`cNs7`UQ zm84la%#H^kv5#8%pTZvrd{txciqOwstP<4unLP_04S2`He+lTiMb)p0yh^i8r(4|> za|}%T*blpf=DjZK_P4b0MZ9`ejp6&@4DyJhStcmP@4jIx{}*7uLEhOT-r*Ch0q@?lB& zM}JDl)qG($fwi{r1(ul&^`@NZ_N3p2l0k+sBK0lUj8=T=NU340bp9joorE6@qt))7 zR=sP_Ep4}QgMvx+uR6Wbqn&1h;KEck`-T6w-4S$OA7ztwEnV}@JF(%xr@ zmnU<@`?4|DgU`KfPZxzR~W+V!pDJNfM9ti2J9u4SD|njyzl7&mCww zKZpD=;Hl&`h!a>t3Ph902h2-%Omy#Ds(7zU_(P`ZY4E-g;hPO|<@VWI>K|wk1BD^u zVpUY~eMM(Nt~MXUmBG_e*!1l~#F`{cBHXm4QLyD=IOm?Vji`86;eU$Z7k8Go7UoF_ zS;BxMU^||mR|%6aAs$Mo?Hr$K~nM)1`0f`hSN!XQh9_ zbESMhxzpmhjz_i8HH|pKa>$Fr5V>3)->qozH94+AqjYfoJ<+snD^#0E(R|M;DM(oy z=XMDP9@(yNX@)cAvXO({x+^^&!?(I(c#!z6Jx)t)LKT-yvsaDoA|!7+$RT*+117Y* zYpdvfBAM>|GXv?6q}xBRZZ1C2vOI7@j1qlA4AWSJBN<_&j<@!8@P4V{9~sy{y1W+h zT0qmJGe|KZ10pbbDL(z{*>7#_FKo;!ZejD|V*Yag$0r-W{Q2l>=f8xr_^ZS7>NnmM z@T~Vj;>p_L3riM}$|~~ZM>qqi1B_Qk@M0f`pB8*}o*T6B2BCVZq1{T&X5sE5jwCJ- zcOC#Ep{S`VY;@407Oc#_7I;ScO1+m)Yj!M>6%Vk0{^Q!C@D7`(TKI-2md;jk4pmc= z#dB-%AI07hxH?{^s;#_fb0)!bJlRPMcsueln!RuF$6M4^OZ`7wk|~cb6WSq&0Dp%g zDr=odr+W%?Xu?~YJ__*V&Y!Qt_Ov-!v)8swbUz<%AieOdrMyWZO&!5uB(efsK9#}P z_~*sC<;Rm};!A5wi-vD5>gL!S9{YIY*Ey%*LX-gKCc14e;D?GdO-|Ky-x6u_L#V?Y!MKL- zID!{kt1w@edG3JZS5*(dO%ZfMKBw^(+fCOal|))*tzu)homqfZS-~J=a&kDVWZle_ z6png(eKB<9)uoW^iIh%IWGbHL+Pw$B{wBWgrL0%l{4;7=1kfWxsO>J$5AQJUzV*jV zXCHukb9vz36~z&P$VGoU 
z+0`R3Cj7r#lDzcnYbo73qpqA6E~lng_=eW*);p^mCfa0Z!A6X3QV&Cs)~Rbh+c&5_ z!KK^BmdV{2;=IpP{h_`Zcx%LR>c0srd_irdNh(WqrJz>{Feh<{FdI~#yoZ809jn&7 z4f|gF7uR%@i(l}p@arf(cu|r#9Y#rQ%zXw(8LpV)RAR1%sHW|6nY{45?ZnGAp$uOy zRE-GTSz4!qH0y?umeu18+nf*1yL;c-*TR>V9%qNX7if{#M$8*g)ir|}%vbnS0C3*N zt!2skTYNo{<-gMOe-Ew9R-#*rM}?RL?}FW2`gN^VVF#*2JS0)d4-aYf)?a70fg)}J zl_A_tKb=cyX{i{@QNbZa95P#wPuCT#;cwdKTJbN4-$`$VR>@%Y{{U*Vy|dFqtWb6H z5RAw4_N`qj_QaD}u`*qFPTJU7+!&>j`rU2FPzl|zDbJuay39GYh@A*EslX2h>ylnb z?3WWO9EEFgaaJt84_)fg{{W=ul9ANyv?a0iuHNVNqrO+RmHaDaEbgt$kzPWX3V*oV z3>wDMe{6peoqa5=J{~2-w(Ltaxi0DNfHzk)0I0rJ++F-&*s zSvDRp{g(9inYB%3{uxjdOE!bbGB6wfPDdwb?)5LM>O*EI%Dy_Y>_>rO~ zqOj~ob6uW~`!@U{(KRUUWVN2`F+NSIxL{awxRKNkN+pWJwRp1OfvL@^K1tUoKEgFC2!ixP4m`!+6YfPePua)Cwpo;{Fd;w0G0l4ho8ey({5#VxCbjV8#ipLo zEVp+SaRz`f;X)`mXU+y`l4!pYPLFl1*;=&f5N))$wr0C5Pb^fpR8iPf(#J{~Bc?c1 z*Svh=<4cD&Kk$=2m0@)rxf;HmCZRU>L!G3CQbPM=gHvAoGP&^etb<5Jy1ts(R_ZC| z+_I7Z0OuKA#B*Oq*?14(ey`x0j}Uwn@pp%lQq$g5kHfzaG{}?Al~se=UP~Fq{+li% z>JA7UYv*qec+18*X1_PYPlj53*MxOTd1U)vhII?Y)Ga|!tnmWeyhXPFI^wvg&Jtef z-#NbK9fyXrYfV1xUlMpxlTm|oklLvxIT-Lg!1{Hqom<0JHo6V1=Z>_Cm6q-;+A7%J zJ4GXRj43%r9)_#l{?t15qk9&e<9!=VyE8GK+3j?-U$bO#k+$u`WMppXTGyW+?RDP` z+-i3o1h%@>B-)Fr-AJBfXbuAay%_rSt(04OqEzIMH`JriEv_Wg^sO@ZUD!rqytTek zS)xDl%RCZ22Ws_?gI}}GzxzUI_Zrv1k)+RS8mh^oYBzC5ad|p|e{#7QIN$?-Ymbjq z(ln2Sf7;Pl9Z+i5Ze)r}(DxQEBYfFW^Qs=fy{iIm9N2hbSucJEX_x*Nc;+L{O-gS$ zoN=6<6zR$=tiXRheS_56)+)7SIm-5CwV#?Sg2|935QED}AU}Bf)~CmN?+Dm@N|s~s=y>A5NM6@d!z&o`@~bNkl!4f0 zvXvRmrS>W_X&$xkYvISkJyJ-#S>nB0MKMKoCb88lwDQpv&)!)VJ8{NA=QY=Sd%gHC z;(rC|v3xPt)=T>}4`-hYiPB@yl#)S@p8f0P3-5{A2ZwaqxA5Mtd1fYyD=m&vHh$#B zILJNw)@GTk>sMN9YWkOoB)FaM^Q+$MSu@b8gWtVOaPBhHwOPfUvuXQBU3f#l7d{2l z^ed}52IBZK|YGf(hW?FXV7i_Jq}M}>sK!PH({V#Ji6|M9+@mtfW;WG2m81_ zyjI@A5?30lJ6#=)x#4{u#utah5bNcy9lldMeq@EzRO!H0&^QL*!ckql%5#?VhZbg!CpZ-|lU77|+Nm%zyF^L?^hqXWR&F_gu1+E#2D=ibl^KmKE@Isjh)+=X`H#I~>7=YBR#&;T;y;Lf64m@cWOP4=OX2IiKO$kJ z+%ls#sZ+=0*1CI5W5&Ad`gg<)I(gY*l*Q*;0L4P_vBok1t`AM|)vbl>)|wWJXQ@qi 
z@@|60^<+sv$=dn*m>B2OS1YIdPrC5VopBAkrq|BCGF@4g3hI_#@;QO{MqX^w|5eGmyy8-mu%p2bCHaix1xT{9s$%WZX+HZy-8%5nXcOm z7y-gBZ04%JkG?bbW5Dx#XV78PZag=k&wjL;aTi6Zni>Y4;DSS>0Pc2(&dqM>E`U1rUxIxj2=ckDzbR5_H^*Ju(}*x8vAX^Oco}#w(`}92MkVsL7MsJ zUhyx1qtbu0HC<=rU833%FlJOIE}==rGtW+IGf?=iYiB6Dk3+gP_URj?+yny*K;TF~ z`=j2owPshfr5=aUekb^s@Y+kpn)l(Alj+YOhftRO@st8jP|4FChPaJ$;})fB;}1LG zMa;TIlCrMRZYMxg^l;o^k55|jEqmh!!QT{3X>Aj0NhBcsz3w9~9n9It0nZLQgMfP1 zOKqUZ4~n#O_e5BLSSQdht}fx>~VHblTA!_Pz0;qxaDStjWF@g{H&psOAVvzJxTXVi=`3u$F^+`=%- z-~tD3o$H;_G>ty$YkSR0Mbg?hqiKHEYOt2MC9-3Xug&x|V?p@fz7w*zxS!$t38tzv zQ9*fVXLn^M3Wq!s({*niXC27*TGbm`j*_~)n_9=HCXmfKnWl=^OdlNMp4FAD+FhwS zYU^TaXrL{5JW=f=07x8U0!}KF-x05TYjS0uM>=2;Wi7P7oe1LujQ#}HBY2AQNH>3I zc^VtVF`+B|t-WiVxm?fPOzQp^UEk@toZcny*NShnsjhVY0JGoEV|4Jjk0&v)2bCc5 zz3Zdb7sIk?O??ifs$Xg{MfP@<7))sk0rKrCcRwDL!fBo$jz0{+J@%L!2J!^B$DBrX zk?wjQtv1`o+Kd`}9vjrWJ#(eK%7)rM1-Sqr<4!T}_md?^2T9QO62nOl||5Rak0q=xu5@ z-w`eC4a@5f;qM}Kl6z>`M3FP}U_)m(&P7e)Zx?um!c8y4GDoFrsQ|?K&E?rvxNd?b zRmnIU=dE#?hNpSr>*>5hs(c-=w~2N{<4tE&W&^3oCnu?@_7?iqfu`T-z8kjEmOJT@ zt$x{N(zUS%l7yTL^r+OHW_k~VJ|}9L7Mm5{hR%~^tw}8D{h?Hm9E{v!B zk`E0@z*1*b;O~e28_>Kbr|W0O);iMP&vd$mk7abwPp8Kp-QON?`;QpwN#UP}{{XX| zoju*ItMCf)$54vJ#h#;Ms=5ef{vFs-B~#pR2WsSeWALBFIs{hwq%1U>WsmJQl8_(F zL*0r009eDY2CG>7UhsUeE}h_C6whsO0sy{w@}@1_@&N&A<4HJbVeKW&vDry+Z>o51 zPZao%#+SMsozpx*;!EQ)&v6;%BN!a09hd`};?^5uB>pu?sIpWVg)5R9h zvP-L5hJmI}m`<`_ly2Dsd(};C{A+n;bxZ9}O}Ux`@~5)2)QgqJC9nr4>0GbEe~P{* z@$|E3o*nSTg{8fLNFcX~zIGttgj<2yN#}9kS6T5V!}^Aapv`-$=~rpxu`+5F5P}Mk zpDs=GA;_<0aSTx|w`GYi&1~XJ@jg;urG<%x+zW7a}RB;$}pZD@M#gKKAH1*%(JTf05UyM`B8 zS1FGtBkwRg^IBRri=xx-8p6iIQ@wlGg01}a!gjWC&JHrmk@{ASoQRLbV;A9OrQ+WP z=vs}Yxou(usNPiemTx-TuR#lE?vd&$)t`fYGU~~oYcTvpv9pHa+2*mcLR?2`B*URV z2||C`uI@jHeh1KA)?HT3Zs*)VZzpMqw(}z?Ado#Vn!R`XLVOasvbE6@;+BaU+%K0Q z)3j!aMcvR7y8v#+eJd)FY3ystuer^xtK+{0-CaR=x;>VeCBn%BQw0*l>(RE5c|Cpe zP9L-ng>);)^`8^Rq-u?Q35Bk8I|Nf_DLlG}@rDDTY~#|lHLrx911;mS*Zg(yrF`8; z=0gOJDkB4x%Z@)9ccw6F>X4I(?`FeWXNh>JejiV#+^{nF#ex*%v 
z#>nVAAMsDbTCa#S?-F=3#@6!PX-?5-xB5W}_Wg+7xZml{M{4Jx(VI%~#;tqeKZ{oo z_=(uwc}&I^y7LYWJf;{toYyP+Il-%I_Rr!yBT>^z+$n`5)MbJ&ig@19?l|@J&0p95 z0A;U^9wf84ztC@V{aW5RPRVrbVkqtybmz?ZfE}@sSL`Ot;W=Alps>;WEG+IO)_h50 zGys)o7TikllhX!91fNq}jpv0wXYUMLC6ekF<52TH(LL6WsYd8G{ly?V2*??OhcA01|vY zkwL0zHu`EwsqAeHVuV~xCp?xN0rajH;efyKe!g_OzY}<5S~TwjHunGqgzf(TS(h03 zy+w4;cpJn&3l;Toq)%@&Qma@Zz_(V{iJtC&g&u;5Vfj|9&Xk*HUk{DXf;A=5??l4W zP*%CTwHA!&8)F-qH8@gx4z(rMi99KP;kDJY>-%-g&m15^(Y&R3+EIxmy?E(e&%zH7 z{4@BCb{pc~gwBBeMqt!*R!G`nI;mI3_}5>ncz5<82yd zxzkTuo$;jQsp6g%OWktL+gQ}1vS^-sPiDncqF$K|-1oq)x5H8{qk5W7v+(}_!$-+J zTsOD3${03yB!k9%E7kNb0)EN&32CeU0ATpPR)ih$7rQp`Mn^w7MlwGEn#=fsul~mR zZn%%)KLu*DpvRGa{j|>pJ=jONhinm%S-OKc+)h8Ki%byY4&2KWEn?i*@fu_8lu_gOMtHbc_&KWI`7?yoP={&#pS8>e^HysgP z$~N5d9~fKde-7?e9Z$s?ZN=5ilHFTcX#z)pM?eFY%G;)EGMn3SbVeucs@7X)zMdUXUYTwyjM&Sz$L4;r*PIrEFji~<4AF>~U zV7Jw*Ke2U&dB4+VvDdCe$jRiiZrgfyrwVp{_J&m+N5qKMucV&x9JKw0BoGS{b}sUoz&((Ib*bLFhcO zfC%ZxuUz$w>Cz6(KXq9+!j=mz3=&dz=g5a9k*|&ibOv>^-vQU7TI^KJ(7r zcUc!lzx(}vf868qd4Jxo*X#Luj^u%sMqXjiO6-S*V-?sz7h#T7O?mn!N zA2;(>!il&J3%G-gY8NLh^_Rz~t#y8Lr;9g#dwc=3XD>}H5P45ch;It>-7wlUlM&eU z%99q?yCk5NHEX8u@RoNM+27lYSHAMj)(o7X>KOf~qdl--dQAC8&sOtm(8prFdVyt(uQ$*u#>3HufbQv+ zNTbLyIkXUb=7EZ!o%;rgJ7kpHE9rJEv!OmC?~$$%L_>W!(jIC0d)A`cT1?gUN6rkH zV++f)QY^s-TE#?BvI`l#0OkTnoN`=HYoA}yA)pG>W9hdrj^W@dxiG+OUYE2;P~Y**@RZc^>Ar1()fWy z=3zIG3^qU_d7h8TJnlc9zjL=G2`XR^^X(c=u0pnDL|GtZ^UQLyn^C}gC$KMKb7o&Y z=2wa3bFd}0`z3fG@m{@}g4c-K8Jrh7gUwVt_m7m%;jq;52OMBc-&s=inTpd~75%8J z(%>OF_joIWmdL1(QHlO`f!ANfnt@MzTE~145!M(dM)VIA}MEme}P^sZYq zgL_~>cH+1yMcWd4D@EEXFf*IJo}iIixSPY|D<&K+@TMpBBl>}8T$0#d)KU+q13z6! 
z_StUwoV_%$*SO@A>s8_=K4BEe>wwUR%;}yW8{Zs$&SA|(;zc?LaKlkG&4M2$#cu2R zgL-}a+kO1>7E>BDBsF|_Pd;=8ClK3Mv^C#9Y8?#i;`IQZjgN7R0B!IZm^|sdRHK{?@=`M`p-fyN% z=Ho+95;<}Md=f*TX`1w1X8hSLU3#-@l~q9vJwU|&Q*Q5_-aJE-o*L`kh^c*ATcKqKZjh) zzD;01h`|QMQ@DYywM`cw8v~9}tZkA$vxeNmLdimR5M@6%nnxI5Rs^aR9Nipw$*o#x ziDUnhSD(K8!9$lu7{<3cL9H>xPzb^9TXkF7#_c0nq2X7|hQ4~dj*1DLD*q^M$=Us* z_-XF&2Et@ZEVi1?$J<6|@Y>9W^<5|ofbnNa2h=aobIW#Hkl|Q0o_hhNp8o3< z>Wyr0-oqN(uD+Xt;eYs!`amaxFeg0!W@NiHzDs z`6fAxJv$c{tc%f7X=zx_MGs zzC&Zyvdg*Qv(+!-dAHcrBE#3Xe5cQ<^#RUZj0kTcooP$Wq#C4b`=K7M=qxVG9-=hN zWw+)Km^kEnZWf6YBE80^AlvY(T{CAqAvq;4vhPiU-(F6VN9D(c^{`)|ldJ3#GF{8p zws?Y3F)$OtJM{DV)_TXL%%`HwUmjw*o_{SpU9lfc-$7EqtcY*$k9rN*1UU_}Dgw3c zHc!ZmqTlb(G7s?HmaFO2HO@4(d|)GV|HFy%NNY7Z#^woVs0@mbg*(fftB0{HK_=td zj;s8!Ip`44;nNR~_D|HMmB#@GYhHgswzXQa!jA%wGKZyUgUz%Jj zDp3UGpZPf-g!U?10QLF8#v|g5>$5B=h4F~u%=p39RV=T}EvO~I8QDA`(pvwlt<_{* z*Si9Wi6k1SzBdfMxf&mOB}Fv8%!1ig%P(JigP0`(<#Nt2S z+k(8c6!%)C0NqRXiO#F3gtPF9hJCIjE|1^Sexa2G859y(=rhA?BS!vm^c6jZ7}iJD zGy9i72hVidll~|xYIaM0I^FYD3AfcExe*ue9O&I)+<;xmOB>|NcjuM-h1A`ML7GBd6d59 zd&sf9bbn&+_}AjAQ@-F5O==H&1!wCMxF0?e5by8}*g! 
zCm(Oh@4@MhA?!rN(l|e0CHB;xoHU){o7g<~1+DtYvG(VAdWni@eWI7==`$d9(MKDP zEUefHbVxmYFQMGCvNeFo;Z)bQ%5xoL+w%z7ziNb?`XD8+ohjYfZ+zLFE6>5dl~0vW}A@24s) zAB_o`L#cbbh{-@hW8X_7I=W`z3$;I+u8#fUH0rv!l{V@S-@H~zjXMd})+&8;8SgGM zw|XL9%)}CJTm&li#jXi#i=o(g<-p066 z*5=C903>8?CdAqaWcpQ0u;kOu2}B6I@|v6@P@{p|P+c?}ifF(ZNMbu6&ZHWR-#rh1xSYRTPooztT0pdjZo2Z zcBb;m3W%8gDvY1$)AEDuyV1wCCLg zYsq-2M@~_IEN*Hv)LqCypB}CrqC0FmTh037jBHfjyIb`v zQ)mE)oImriePj;63PA4SO?>}M`i|J9RQxv$lU}dmlCpyALg5sIU>A}m^5uA?XvP}xvpj#>F*&&8KxZ?FQN z(^`ec-k){=MqROz_vSWlr|s)zaL=brxsS|*?di}yukDDMgiW;BdEb{0f~?k-H|sh4 z{}2Q3DyLCXJ+*k>BG{{ZR+NHu_%(VOBSTFR9#fM{??&FiBQs)j0^YSu89TgG`5xm* zcYf3p0O>M>U{kACV<0rp&CWv~`~ex7eSqVC`pZ=4S|_cr%hz8II+d$A$gIJikZ8f% z$B!pNL}Bs-Iu|o?kk5u{L5q&ZfvG>`N~2GD#|mT2D*B)pgb%~aS%5iA*L!pd!<1K(1msC!uia6F_n2zDj_R`x&& z4XxlqUtCY>V9<5l=K{3cK8Q~xeV{-yAuodBN-$Z}=7nPn_a+uU!yn}Nq3!Lf+KV`7 zbT$&1xY@0~OC*W&d7QnaExlLV8wa(3{?e}jTaG-LeCSzN-!QQj+Y|l~5f_`F8XG+` z+RnPlC;-zy5oXZw+*@8C^ar74mDf1cLQvSt%1ME*`$iLL4=z+;{sso|7{SwTlOCiu zj*nv~70+Ftuf_l#FwDMt-m4ve>P7Lo!*8AZ0)8S7h?!&VJbu|5t|!%uE;rLn)nVJb zeustD!TEuFe_H)rCWt=Ri*N1D{=tNz!9NxrRxH+TiM_e&+f8qG`_!D-V1`6vylPK2 zU#+2{R=iPjcu-VDZ_+=KNj$Gjg{k@0 z62m02jU?=SX)X6QVYGQfpBK{@3xMOL-_Gfd<39?+iN~eg`c_E>_x4N>%~jPVc+QWW zq`)%_=Vvtt+iAG{2&MnSJcgABYXK-Xu9f_k1REo}gRdXYlT?VLbI>;k%ZPeo`Fn>O zLQ-URvjFb4n$__(z}(Sk(J1oHUozW^r(F{ltb;cHTe%uGHCC(K4Hi2RHd3M>Wcrwh zS9*fkM+tBqk`yrx#~x$!y2dwpk7EJ`^oVFR8J!v}mKq~WlkrAM z42xUBlFM10#eRldPuW5Q&7)3JZEljb|1#Y^+zlAYQ;(!=B6h9>z(npZvRNL6BQaal zqP*M=TdEFbO)zQV=MuL-w!rvqz+7AJ>Cux;(1xmvf^z7Ic;I=EX#`nIP7_iI@%OJF z$r7lt9Q0fYrt4Xv1e8ZI`mh!w+zJ(4N-7Ftf=XAD%k}KVQ7mvq_a#O=`$^$t(m=7B ziot^G8S72K5aHXe-?o-=?}M%h{HR>o4s?!@r`H&IQMG&jvfw6&zMTOo?lmdU;l6{4 zg4Y7O;89mxKM056FlRsM54)vH92bmV+j1h_41T6?&eb^Hh0Ok=&_fafobC{di(FIV z+{QqUwv#0ljZ_ZpEN({!GEuXqh$GJPc_y9m9iS00IC$tIH{<(-SvHw16pYXeLVrP0r=8n)gEGI4+LatYUWHcvS&F{};))AYifB4PG-t zE`d#HAc+Yz!y>ZEsqUS9>u8ay@US=CvnO}#o2zArIn>>X?FhC}&&L-i)u49l@UV)l#{y6(`lAqfCwoPg}&3->99s|Fk z9m?C{X^0yR&FBzU$6H%%Ro-p(`13=;KJ(1&s0}q=cSUP!2igIKg4Cvb@4p(S&6aR? 
z749P)-JacCYDb;Ezf5meCLE!cL}L0>eq)^S-5*@=?~jRp<>&iH5deIutB=n?EY~%~ zT=5bS7e>M4PNkstsbHp`qiBgxf=jM`gX`m+`u=Lm3i}W7xmS``n4B6XR4K-7nNfps zr|}=fRuP$+YB2a`Tx(ogV&L94vJ*$|W#$$Cc9NUV7Ef=nzw~WW2_LD-+U%BP=6<1* z>dy^QPp<~P36D!ZfAP2C2Wx#``uJ$OS&;!#v}!jdKQyTz#CQ2J$41veAz&FpHuH!d z8@U1RwAWxE+V+_)-HSiSS!ks)#|{b zA6(Sz<<4Nf-p0P2&Aq>5b6gudpc}xx)hZv6RX)Gt?=nD@1D1R2Dqgig?KNP;KvP|E z#XtsKwXXPJy2M|Oct2IW`PVHIaowC?k37!z@9il6)Lxk3AYGm1Fu`d_Qc&>?>0Cat zYkgX`@@}8cp{-8HZ-AgQOpDN@(?*bvPFOG~?eUS?L)ana?x|**dW~L7|5JFAYP@B) zE1$dzz3|7P*tgQB1{&*9u+5qmzk-_e6A>MO(;dAPkImzThFZUJM{IvL?we$2I0|XH z0FtC8Ve{ zDljG3qt?g;;ck?&cZhpE<2cve||lerU@g*P(dZsFT33|3xQ zj%~UUYMqqyHKxz{&9Y{-=y5B2!r$wpI-6{oI1(nL8TpJc5bV#v0dviRK> z)Bh-38E@IT?E5fl;poMwQpYy=Z^#8NE=4rM!*l9oVgWAwD^~E&rivKhUak9U=dpQ< zlY(gx#`XX2aAux~zs{)F$}m|UVJ1ed*!I}c?>r2g9%gv@_D}7(6HByfH`ILQD zpoM)Cb!#0@l`Pe97z5s~dR(rY;!7u`Vv(PIF1gaG!?ow((_@%VG{CMS3WE!YD4h1^ z=GwpCQ^&{TNNcfh=E^w0PqX*#Jn&esn)=P9>_O*-w)F0w(%*Y&7TT$Mf+<@9z}z9W zYXNJaA|n_S9*NR zLYOW-O&ld5!RWRm_u25<`!-#@6%?ymwrZ?P5=jk3Fv`YO`|?)b0!b_mxB+z=C?8@} zR~vv>K`Ih$8-Wz)A9^Cl3=J}MaIJp;1HQ*=tF=eeCsVR{fBUI)8!8`%ztNpdhu5gd zmL8)%1%APOrE?0yuB1<8guSx9+7r0!(6+rS0Jt)K0dVOvUxXjizk(fUlbjq2ct+n+<_E z2(^1xl*IQ-vJ*&_kySe76Rrd5NR0U*IS)FMu=!k5qB(eCce-6uLlN#_Rs3lJZK+Xc z*JdfDXRYPM>Rb^sDHwRzu1avnF-ETZQ4?ATmV1g1@G*Sr-O>^>R)YqB8t5mJ+duHY zO3t0N#{nzZNC43XQtlRrY{A;7aGUy%L9FKjOqe)ccqloWuB#S{AM6w5j8xB6TjEJBBzVOmaqKFnw>hKPpD3bF$lrK+#UUie6UvJy{mUb<%~ zp1vC~sQl@z(a@r|s@afjeP0CJ9Ufy#e;2=$gIDk%L49d(W^!W;w(U*;J+n8IF^{}tQfx8#)0A5#$btfoFcO=(XEcOHk!*`w z4+B)mIbYMUm300tW4*FhLlD|w#y38+CF=39YK|AIA!8a;GeYAeAFx#S(fr?08@e$y z$kk6fBi`9*b=2N6!CTA;LVPx{F60t4`e;9vV5>TZv-c4+;KRi?auQLKWh90} z3J3?`E{i>tAV>wA@ z)R#Ak)!hJ^%nO%Xq1b7@md3@P;ql(R@9G|mV>;(`nr%IyI6_{`O5jaV*4@_<2%YC6 z`tw9BNIu*%hiERHo}J~xyn&mS)si`jGV;544QG{nd?{hfDag?N2vYIP)?-@n_E-h!cvF&u^5s^-Hp?f0t|TsdBnhhuQ%-o3_$mB${i-vU|@{O{rVc9l+w2RA%x856Yqsf^+U)5{#A z!1vnrMBNq9&V9F?V4rR{cf&=n;m@RT%=n$2P;QTepRqy5Wz#vg;m-Sj?nKvr6x{8e 
zF>k$rvofiqK2MLsLQrKD=mhksL)UZjp=@=#r6zjP`n`gWWJlNj4b0eYer{?#FxtcYklKPf6AG?v?)rEW_~=lFIFD?U(KD;2B+L;bI4+b)!e zyg&dxVQW->LgrliH}b(+ItLG2o*U7f#1Z^*2@$v@s4zoq)%S5Wq3?J2 zdjvB(s%~PmM?sAf?Naz?6)6=TcOBKBb$_o{9HA)U}N4OC|65LU>%9mM&H*9uCq3IE=SHm*#l^ z7yNO;PCk{FU)8|H^V^kpo)ZB5udE=nFkD*#`Xgz@ov>y;SuKI}eYH>XkI0ayxL(DLv` z=Or%o!?tAz-DS}+AQWTMPz$3Rli7xtKk=lOGxat?uakyNFMyF7lf_}D>wc@^ckQ~Jsn>-s%mjV#Iar&+9=c!?v}t-} z3kq#zZcs9qFmIU2X5w}9{MyctS-WB$>;--%=H4{v{^}y95pwpmM#EiRP4%bF;RiSU zlCH)=#Kx+e@n`#jSV6|!)Yy-+?mYLuApBDzC~i~nY7{k`j;EXHfQF?_)$J`VW7KRe z6rrY!F`&nUwQ18Na7*^VFZ6bqc0=;PuXXYUmU)?vtQ!vpG|F@VZyocg`p5?PJ);#EVmrgXeZ^NaE z%1;_dftMK4kcKH-e$`L}VMyBb_)pol6=uL}=QE!?D?RUJs4?L<{*EZ~Ij2YbU2K#Z zvti6e-np6pFgO$ADrcdnixBl2#%+&o;tPy$VqZ5!mR?ZaXjJ)78p1{vJ%Dq1czVtd z$m>K@j|P7u*>dfZ+3SFz+828&3wJxk)#Hsp8Q788?<5w&nynzfX&EDe?u~-bRW)HD z{7`?7R~CR}oqh~&)p)S`7T_Ax=hR`+u2z6QNBc*>Ppc6+XjwO1Kq3MpW?{uOve;Ls zsQl7WVc?X^Sv)ep#rMMLa`@aZpcuS3ZXOu1o!^9z2#dAnzad7cr_2QlzNEu3!DIi96rvIaev1>MnvGDY;<1m~4PWI)%Nj7`< znw~qgj!;5(`c_6dFSWXCnNUvO4x}HG1Jf#PyV4!}6?#oZ_vraP2J5k>;p4GqosbA} z?T>pmqBW&ci3`>+1IxT<)4NlX26*K7Y~Ro2Z59!DK}3&dyo6|Bw~5*=o#$- z7x<{3`C5GbLOzX!Ria!F~u_z`h{i<<O5m;YgKhX0O1DqW@Gs3 zb9+ebyp8KVn;C)i=<(BsPdX1rc^0QOE?x}0UFFOjCz|MAmWPPJs2T|^#EG))^N4C^ zp)E`H;@JKDs?;%B;e@gMq$kw6Ub`SEd|l_22!8X6?>177Eb4aJIX91M$Kcd^+W*M@ zb#Rq(aI~Lq(k~=a5&uze)n5byIu$f-wzN)!UKobCnthi3WH?E`D7(z>KB#7JVdw(5 zNrLCO@Vxc`nd>SH1unnVyO*j+KWZPJHo2KzQrcns`MFX)1}O=%nhIedA|`al)D%~C z=hA?R627d-UXLEE?d3>DUWO@G#jFVKJLVDVy0>vaUfZoSu<$#Mi3a{#lpMcx&u5qJ zQ;P@rSA{K*@tB6qQ$RRLPav@vod+YKQyt@%z zw)QX?VwC@dCUmtoM1c4?*LA{j8{em%X{x|tu`nPfJ^n(n?9H(9NJw_W&Jq{V7ccXC zT*`NTQR95ExVC2zQS4p!bQlntQp?^^K2&mW(lVZJzj+v{!AXp~&?HIQnBa8dLwd7C znKH7}4{s|rD(Gui_2&yyi56G-hA3WUlbp+4P01n#xT`yajVL{*-&1N^c9}O~_cH<~ zxv9LWv}R^OCP99LGa^NH$$c{dDG1Yxb`6cXBfBIyZlMI&xm9$%2Qk~PE)hY@!p4UQ z^`5r&ob{+vxo2H>L1BeP+ciIcJvV&S^l-zUdbH6HP$B=!M*<}huiSDuB- zle0uEh=J-V%Oeew2Qo|J6J|;aymo&zZ7uspk(@BqI{Z?dz0AI8(>{w#J;ey zH~*?wfEuY~ZKfSMzPqB^%k3elCchd(uYYJ@oh4+}jDEiNFi-o&?XN$D=I^yf$jk*{ 
z)53g*JOMXtn!*}}^qg!UdieD&{ls?Hje@9-03D$)22EVsgExkd(vde?E-;~q$H0ij znLu;mr4Ke}KBhj$sGh(kt4F`!2vw$*UCVx%pjoYnoceGBkKv8SyiGIj$gxmx>ZO^U zHRM%1NWa-PTo5R5@@F~hv6M5ea2T}`ACrB3%GaI!x)md9eb zN;SU_ZgTG-Zt+*e?>iZ5nk@N8;qrRb>1Pl_-1O(Dvdh^W@q~iKVaTfgUK>6Q`@)21 z-(@Qx-y!pI@{na2=pO#{d$K=e(!)2c9%?+B^@5>fV7gMH5Wm|e$hpr68aw#pZN2bh zUwDgp=?2#gLJydDrcGQX06>jXWQ^0&hUmSJ+o9KRjjp%D`a8GOh)+MiL^=VE2$3d9 zO$m`xj$@=04Z`)|b(Vs6HbKEpsS>-$0Le z<{13`_1_50l6qndUvJ3URCN_oj$HC$r-Nuk2tHu^s3BthtE3>0w}a!*=wU}4oRwNo zB5-QtCdM3Q<4joKz+1a5xpRjMx!K85{heFa+49NB;OcRK>O*L^j!+-fXbE|TdKaeO z&(7p%QfGDL{tSyIA3oR)?eWv)MTAZEVBb$n_X?x%num<8FEi$CqcEdMi_elZ5*?Ch zpI9e+Q_FEX0?>tY!kI%jKsz~@Z1D-{$Hb;~_maC!ZO@xvg`>x!xdIKRp5YguEdjpi zOe<)=^(CT*XX8rUNn{B+fN58Jk&J0xj<#(C>}?Xr3d0+H|0t-{3`+i>E&t6TeV$Af3a7A|!LK|D8(v>J3m-uCgCeTQD>BGF+f$FJBe zp?QN+4%+G+Q-haz>OV@+^go#jgAi^Ti3Er#K{Q%R@l%7cFW1VSjTuSF^ygzn8ZVU> zvw@&T=44qp@h=WZYOq^5O&Nw@?X;U2QZk&-u8Vv6wE97DMWc8 z;D`u8CoNdo^jsxURTprYp0AOR8x5qOm67Yk=v_mqiPPsTJVPWYSM_fHS~@ME2Mb%V zLkJ+>N$%q`6iYE>R^ABN4L2%;g<$(7p(xvh-KWc~=28XQ+Sa`h@8EMg{5G zWnPFd6oQLu%vjChFc{g}7Faqxg$m25**r7ap*rQ_QZGMTrVZ(`z1gl_h|DSt8Sp>P zI++j)`n3qsWa&5BxNqEPr^-0pCP)ptMzmiEV43d{tP_lIKuCP_6m3OlQ=Eql8$483 zuAHn{Xs?nvP%yW z#KHGbT-VxL2Un}qbB2aZ4E5gFDtYAPI{rt&3cG_>C=<3_vIayd8gK+s zN`@HkB2M~DXg`TpaoD|7bZ;%&bR-i)aG;L6A;!4K5fdD;zpSXau=Ov8_GZ z@>vfj4}CEH%kN~55E))*{!9EOTJYLVqsHyY23Gm2xV-K0A#ia%;+bv$OXdlZiypt6hT>%xm#j&$=Ob^h8pP3cim3^-psmL3fB#`Lh+j@L%SvX#BJxdFE)I zFT2rcx&;4dHGh6oWW5Xr=;?XzY7e+@=|zQ5j2eRTw)$3H+vMNmF)p4Z0C{`o zTTQG3g6VAPqfbwm=5r$h%j`E{IcQX3cj?kI2dW?J#cm{hIxP zG8mnNTolV|b2W(+`Przl=yri?>XwjUbmz!nsBaz)=Xy!#eKrB!;o4Nv$VvI2Xv++f zD0My4csK^Gs%UUA`86piI@$D0w(}NmwmPG}SQXE&IpCuFTx}Y<`XffP57{3@Q=%K5H?VjLit8)6QKR)JN1x&N5Fs7@JB!g_8`hHy(Q_X5=rq)6)O;Xz6zu zSR*6^jcLE1>@$ti<`r_YdNkyO2giq#Jkld$NrE@m64Ann8*>cP`w8e~Qn}HtF+QRg z`?dW6zG%}Z^g!lD?U2*5Yt*E_eJ|q+hI(UAY{7!rSLEm+HL|>zx0Gbr8a&nZH`_r` ze4B^4cg^t~wAahxijP2zXt%<-gH_)UhwL45dnp?~J*h36eA^QJ)Z6cLPn*q5IO)}W z)#qcAoM=S=>j{u$iyW0|Ke;N``^(vVGi2pd+w&3Pq>DJOKXr9~e&I!W>JO%y?-KVs 
zdTM;B*Z)#o(>OK;#nJSj?Jb(#WoO}cPh*=V?eW7g1d)w%%)Xg}O2uzsY!DHE>DR1S+>o%0D z9_Lli@+;oj4WTE0*HO-;&r^S`i)O87WW+DFGpNKeM{u3wMR!5cp1HE75K$Koda#vl zho%Hj)9JiO`@(dLYvh2kWb-Oj9vMA4T8WJE)k6vp4bZ34n2tW?&R`);;hzi^z(tX0 z2}QgDi!wwIrcRLgSw1S&Zj!en3sSv1w9xsz@5wCJ;_Rh_0d|r+i(TEbh_M2GHchX} ztX?;$B)VJv{-GbT(+bcRt7KPF@t3uHqY?z4ZF|{hfMO=SOnZs1)e@I1GMG@m=o*Uf zo%Op-7AG(Bhm0Wa6CKqn;J1(uo8pf}pMpK7`|t1n;QO4cmMxZ76VdS3D7!er5~hGZ zi)z<#Ahh#LN5Z|75Qdgr+xagm(bTbC#OOT@Rq{4g9mc+bVl;;yVWox+qdxS0l=}V0 z=5H@IFeJcItXlgib48M_x^}+vumCeLgD`(;OC;xKM{KmE0iEr@$$rMFVJggrAS~84 zYMJs1KxYVpSx=J`1LS*s>1Fo+a2fc2*hB;wYfFyDiNF4fYfygJ;HEtH1I3DDc6~S` zP+CN?nmnO#N+ly=vTsLuzb$~K(#ucA- z3b_14=8t@4hLUUD{wb=!?GICvAVJZoePW|?us?nZ$QLbEonCeM-^_!(o}~vll2;Rg zwN!EVWSrDSG|au!!1bOunt!79Ij<56!|#?ga#~#|sxG_GAFHfEwL`X64J19dEXAT& zEEmDA@;8!{wSuvTNn@K+awc*lslz!F_5|-AduJvO@0a41W}++n8FTl)-XH*SCHLzf0%=O5-_+~X5UOc;4kxm-&7fFfbzuZT-ZxIvQ*?7PLOUV zUghDDBS#TXl$zU-eUfMd6i^<|pK+R;HwvfHDwv~jxTX8j+~+ti8dG^IXm*gk_+3-n zT|6X8{@1=}4<`6>F_}=fv@jHtVGV~vsMUM?70&m+;(NcXTpUKZy60Q<7Y*3zBTOj6 zZ(7h?ix(0V{cEi4I9MUFT(1g`TCS&$OY52kKf-Bb6}k=HXX^10NWX`8&p;U&ra2#5 zdT67&hXQn|2W}WLY7@b}c{Ypl?b9>U&qm&@d5a`TVs+nnX|qilPtaRb=Ut7H_qq(^ zP*xBd!5KYU@obFCQ(j8MOB_V;5a72!@q{D}XD5zeImu$q8IVviw(={2_e6%_n_amt z=g_nwAmGbhhge?isjTOjL&p$+ZtiLX^q?i9XNjMvUPf=(JFYtpfoXd^c%AG?^Hp{} zeD;=OGYw{l6*Zs}E*ZE7?rt zlkz$yUu9I9SM9tk??yn6U7jl|!L2xMF_1bz~@sA=-u!eeXb9M36 z1No8IbjOUvn^rEb7KTUeUKD1^3^Mv?v(quZdN>LzEw6<{0%~P`!sl$ee+($@Gh??r zESDW8>BO?Q+in9EH=C=%$!v%?5;cB!!hoPi<`BAYRag4*_OQ6M8U%P_0Qiq4Nvb?z zRe|PZ4P}GTWf{%BxSAX|DArIqcF8q$Fh-)8Z|Lnas3K5=T7)_cy_H-<&gx?vQG z%V$S~c6pF>Y{@;)pG;et_Swq4%6ZY_k&B9%@r$Y^0f|`>#KRs;YP{Qo(DwNJlg^@-vv(5Y z_!u;=M@!_-qQ6EhYG>CG%vP3Tj``n`gj@q~QhsQ&56S4()9R96l0QB^eCNAF!QCg? 
z5CA}E@DgKI4yub5iSs`d5^U63>QUnjlKmupbZMVYjIt3$tt@AZ~Jue zN*c=evJo|tY8K>wH54%cbZl)(V0rJ!(ZjG?18 zM0H<$tvXP~MN3qD$mJIKQuyEz9NikORp(lvsTsvaH^*x6@~cCw#_$7ddq4{{v~xWg2UH&m?cCi@RC~kdM`= zXt?=gLrVzFRabg;cu=M5ya&XW-)&a=t%~ycTdgytO*Qi1G?#J8N(|25p2qa`#Tc5P z15XP3)7L+IyL)lhj^dhur#Rz#vAky2nNp*Sn(4kNm5|RDfCV}@oz?8Z!cf684$f2$ z3hn^0K)!gRn{G0ZFd_atM*2BR^RE&8mfB05UL)#2He>4sS$W6cXVb+(>LZ3g#BCT6$W(!2EZERV(FxuK~~%xYzTVpSYZZtvrUibmnPp^0x{4; z^6CyrB8e?~vnTyg^7}?Y`CGk2#P$6Xa=)VV&RwxZRM+!LAo@*;$A_cvIKuu_ijg|p h+Qtp6Txl0g-?{G;p%!6#kS Date: Mon, 22 May 2023 10:31:34 +0800 Subject: [PATCH 04/52] [Docs] update bbox_cs2xywh comments (#2365) --- README.md | 13 ++++++------- README_CN.md | 13 ++++++------- mmpose/structures/bbox/transforms.py | 4 ++-- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 951c4adf2e..c40b9cdc4c 100644 --- a/README.md +++ b/README.md @@ -342,21 +342,20 @@ This project is released under the [Apache 2.0 license](LICENSE). - [MMEngine](https://github.com/open-mmlab/mmengine): OpenMMLab foundational library for training deep learning models. - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab foundational library for computer vision. -- [MIM](https://github.com/open-mmlab/mim): MIM installs OpenMMLab packages. -- [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab image classification toolbox and benchmark. +- [MMPreTrain](https://github.com/open-mmlab/mmpretrain): OpenMMLab pre-training toolbox and benchmark. +- [MMagic](https://github.com/open-mmlab/mmagic): Open**MM**Lab **A**dvanced, **G**enerative and **I**ntelligent **C**reation toolbox. - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. - [MMRotate](https://github.com/open-mmlab/mmrotate): OpenMMLab rotated object detection toolbox and benchmark. 
+- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab text detection, recognition, and understanding toolbox. - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. - [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 3D human parametric model toolbox and benchmark. -- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab self-supervised learning toolbox and benchmark. -- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab model compression toolbox and benchmark. - [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab fewshot learning toolbox and benchmark. - [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation action understanding toolbox and benchmark. -- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab optical flow toolbox and benchmark. -- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. -- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. - [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab Model Deployment Framework. +- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab model compression toolbox and benchmark. +- [MIM](https://github.com/open-mmlab/mim): MIM installs OpenMMLab packages. +- [Playground](https://github.com/open-mmlab/playground): A central hub for gathering and showcasing amazing projects built upon OpenMMLab. 
diff --git a/README_CN.md b/README_CN.md index 49a956cab9..519e9889da 100644 --- a/README_CN.md +++ b/README_CN.md @@ -339,24 +339,23 @@ MMPose 是一款由不同学校和公司共同贡献的开源项目。我们感 - [MMEngine](https://github.com/open-mmlab/mmengine): OpenMMLab 深度学习模型训练基础库 - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab 计算机视觉基础库 -- [MIM](https://github.com/open-mmlab/mim): OpenMMlab 项目、算法、模型的统一入口 -- [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱 +- [MMPreTrain](https://github.com/open-mmlab/mmpretrain): OpenMMLab 深度学习预训练工具箱 +- [MMagic](https://github.com/open-mmlab/mmagic): OpenMMLab 新一代人工智能内容生成(AIGC)工具箱 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 目标检测工具箱 - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用 3D 目标检测平台 - [MMRotate](https://github.com/open-mmlab/mmrotate): OpenMMLab 旋转框检测工具箱与测试基准 +- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱 - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱 - [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 人体参数化模型工具箱与测试基准 -- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab 自监督学习工具箱与测试基准 -- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab 模型压缩工具箱与测试基准 - [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab 少样本学习工具箱与测试基准 - [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱 -- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱与测试基准 -- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 -- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 - [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab 模型部署框架 +- 
[MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab 模型压缩工具箱与测试基准 +- [MIM](https://github.com/open-mmlab/mim): OpenMMlab 项目、算法、模型的统一入口 +- [Playground](https://github.com/open-mmlab/playground): 收集和展示 OpenMMLab 相关的前沿、有趣的社区项目 ## 欢迎加入 OpenMMLab 社区 diff --git a/mmpose/structures/bbox/transforms.py b/mmpose/structures/bbox/transforms.py index 027ac0717b..c0c8e73395 100644 --- a/mmpose/structures/bbox/transforms.py +++ b/mmpose/structures/bbox/transforms.py @@ -111,7 +111,7 @@ def bbox_xywh2cs(bbox: np.ndarray, def bbox_cs2xyxy(center: np.ndarray, scale: np.ndarray, padding: float = 1.) -> np.ndarray: - """Transform the bbox format from (center, scale) to (x,y,w,h). + """Transform the bbox format from (center, scale) to (x1,y1,x2,y2). Args: center (ndarray): BBox center (x, y) in shape (2,) or (n, 2) @@ -120,7 +120,7 @@ def bbox_cs2xyxy(center: np.ndarray, Default: 1.0 Returns: - ndarray[float32]: BBox (x, y, w, h) in shape (4, ) or (n, 4) + ndarray[float32]: BBox (x1, y1, x2, y2) in shape (4, ) or (n, 4) """ dim = center.ndim From ae79819a6b56869b834ad717e97a5af2656fac11 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 22 May 2023 17:17:31 +0800 Subject: [PATCH 05/52] [Docs] add RTMPose Android demo (#2387) --- projects/rtmpose/README.md | 116 +++++++++--------- projects/rtmpose/README_CN.md | 116 +++++++++--------- .../PoseTracker-Android-Prototype/README.md | 5 + projects/rtmpose/examples/README.md | 6 + 4 files changed, 131 insertions(+), 112 deletions(-) create mode 100644 projects/rtmpose/examples/PoseTracker-Android-Prototype/README.md diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index cb4e742b7c..fe30d5d93e 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -50,6 +50,10 @@ ______________________________________________________________________ ## 🥳 🚀 What's New [🔝](#-table-of-contents) +- May. 2023: + - Add [code examples](./examples/) of RTMPose. + - Release Hand models trained on 5 datasets. 
+ - Release Body models trained on 7 datasets. - Mar. 2023: RTMPose is released. RTMPose-m runs at 430+ FPS and achieves 75.8 mAP on COCO val set. ## 📖 Introduction [🔝](#-table-of-contents) @@ -160,14 +164,14 @@ Feel free to join our community group for more help:
AIC+COCO -| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | EPE
(Body8) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :---------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-t](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 68.5 | 91.28 | 63.38 | 19.87 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-aic-coco_pt-aic-coco_420e-256x192-cfc8f33d_20230126.pth) | -| [RTMPose-s](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 72.2 | 92.95 | 66.19 | 17.32 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-aic-coco_pt-aic-coco_420e-256x192-fcb2599b_20230126.pth) | -| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 75.8 | 94.13 | 68.53 | 15.42 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth) | -| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.5 | 94.35 | 68.98 | 15.10 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-256x192-f016ffe0_20230126.pth) | -| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 77.0 | 94.32 | 69.85 | 14.64 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-384x288-a62a0b32_20230228.pth) | -| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 77.3 | 94.54 | 70.14 | 14.30 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-384x288-97d6cb0f_20230228.pth) | +| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :---------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 68.5 | 91.28 | 63.38 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-aic-coco_pt-aic-coco_420e-256x192-cfc8f33d_20230126.pth) | +| [RTMPose-s](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 72.2 | 92.95 | 66.19 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-aic-coco_pt-aic-coco_420e-256x192-fcb2599b_20230126.pth) | +| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 75.8 | 94.13 | 68.53 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth) | +| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.5 | 94.35 | 68.98 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-256x192-f016ffe0_20230126.pth) | +| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 77.0 | 94.32 | 69.85 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-384x288-a62a0b32_20230228.pth) | +| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 77.3 | 94.54 | 70.14 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-384x288-97d6cb0f_20230228.pth) |
@@ -184,14 +188,14 @@ Feel free to join our community group for more help: - [PoseTrack18](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#posetrack18) - `Body8` denotes the addition of the [OCHuman](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#ochuman) dataset, in addition to the 7 datasets mentioned above, for evaluation. -| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | EPE
(Body8) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :-----------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 19.45 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | -| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 17.85 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | -| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 15.12 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | -| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 13.79 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | -| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.98 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | -| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 13.08 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | +| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :-----------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | +| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | +| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.72 | 4.33 | 24.78 | 3.66 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | +| 
[RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | @@ -201,25 +205,25 @@ Feel free to join our community group for more help: - Model pruning is supported by [MMRazor](https://github.com/open-mmlab/mmrazor) -| Config | Input Size | AP
(COCO) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :--------------: | :--------: | :---------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :-----------------: | -| RTMPose-s-aic-coco-pruned | 256x192 | 69.4 | 3.43 | 0.35 | - | - | - | [Model](https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.pth) | +| Config | Input Size | AP
(COCO) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :-----------------------: | :--------: | :---------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | +| RTMPose-s-aic-coco-pruned | 256x192 | 69.4 | 3.43 | 0.35 | - | - | - | [Model](https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.pth) | For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pruning/README.md). ### WholeBody 2d (133 Keypoints) -| Config | Input Size | Whole AP | Whole AR | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :------------------------------------------- | :--------: | :------: | :------: | :------: | :--------------------------------: | :---------------------------------------: | :---------------------------------------------: | -| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| Config | Input Size | Whole AP | Whole AR | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | +| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | ### Animal 2d (17 Keypoints) -| Config | Input Size | AP
(AP10K) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :-----------------------------------------: | :--------: | :----------------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | -| [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | +| Config | Input Size | AP
(AP10K) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | +| [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | ### Face 2d (106 Keypoints) @@ -227,15 +231,15 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr -| Config | Input Size | NME
(LaPa) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :----------------------------------------------------------------------------: | :--------: | :----------------: | :------: | :--------------------------------: | :---------------------------------------: | :---------: | -| [RTMPose-m (alpha version)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | +| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :-------------------------------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :---------: | +| [RTMPose-m (alpha version)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | ### Hand 2d (21 Keypoints) -| Detection Config | Input Size | Model AP
(OneHand10K) | Flops
(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :------------------------------------: | :--------: | :---------------------------: | :---------------: | :--------------------------------: | :---------------------------------------: | :-----------------------------: | -| [RTMDet-nano
(alpha version)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) | +| Detection Config | Input Size | Model AP
(OneHand10K) | Flops
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | +| [RTMDet-nano
(alpha version)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) |
Hand5 @@ -247,9 +251,9 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr - [RHD2d](https://lmb.informatik.uni-freiburg.de/resources/datasets/RenderedHandposeDataset.en.html) - [Halpe](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_wholebody_keypoint.html#halpe) -| Config | Input Size | PCK@0.2
(COCO-Wholebody-Hand) | PCK@0.2
(Hand5) | AUC
(Hand5) | EPE
(Hand5) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :-------------------------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------: | :---------------------: | :-----------------: | :-----------------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-m\*
(alpha version)](./rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py) | 256x256 | 81.5 | 96.4 | 83.9 | 5.06 | 2.581 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-hand5_pt-aic-coco_210e-256x256-74fb594_20230320.pth) | +| Config | Input Size | PCK@0.2
(COCO-Wholebody-Hand) | PCK@0.2
(Hand5) | AUC
(Hand5) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :-------------------------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------: | :---------------------: | :-----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-m\*
(alpha version)](./rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py) | 256x256 | 81.5 | 96.4 | 83.9 | 2.581 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-hand5_pt-aic-coco_210e-256x256-74fb594_20230320.pth) |
@@ -260,12 +264,12 @@ We provide the UDP pretraining configs of the CSPNeXt backbone. Find more detail
AIC+COCO -| Model | Input Size | Params(M) | Flops(G) | AP
(GT) | AR
(GT) | Download | -| :----------: | :--------: | :-------: | :------: | :-------------: | :-------------: | :-----------------------------------------------------------------------------------------------------------------------------: | -| CSPNeXt-tiny | 256x192 | 6.03 | 1.43 | 65.5 | 68.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-tiny_udp-aic-coco_210e-256x192-cbed682d_20230130.pth) | -| CSPNeXt-s | 256x192 | 8.58 | 1.78 | 70.0 | 73.3 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth) | -| CSPNeXt-m | 256x192 | 17.53 | 3.05 | 74.8 | 77.7 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth) | -| CSPNeXt-l | 256x192 | 32.44 | 5.32 | 77.2 | 79.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-l_udp-aic-coco_210e-256x192-273b7631_20230130.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | AP
(GT) | AR
(GT) | Download | +| :----------: | :--------: | :----------------: | :---------------: | :-------------: | :-------------: | :---------------------------------------------------------------------------------------------------------------: | +| CSPNeXt-tiny | 256x192 | 6.03 | 1.43 | 65.5 | 68.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-tiny_udp-aic-coco_210e-256x192-cbed682d_20230130.pth) | +| CSPNeXt-s | 256x192 | 8.58 | 1.78 | 70.0 | 73.3 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth) | +| CSPNeXt-m | 256x192 | 17.53 | 3.05 | 74.8 | 77.7 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth) | +| CSPNeXt-l | 256x192 | 32.44 | 5.32 | 77.2 | 79.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-l_udp-aic-coco_210e-256x192-273b7631_20230130.pth) |
@@ -282,14 +286,14 @@ We provide the UDP pretraining configs of the CSPNeXt backbone. Find more detail - [PoseTrack18](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#posetrack18) - `Body8` denotes the addition of the [OCHuman](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#ochuman) dataset, in addition to the 7 datasets mentioned above, for evaluation. -| Model | Input Size | Params(M) | Flops(G) | AP
(COCO) | PCK@0.2
(Body8) | AUC
(Body8) | EPE
(Body8) | Download | -| :------------: | :--------: | :-------: | :------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------------------------------------------------------------------------------: | -| CSPNeXt-tiny\* | 256x192 | 6.03 | 1.43 | 65.9 | 96.34 | 63.80 | 18.63 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth) | -| CSPNeXt-s\* | 256x192 | 8.58 | 1.78 | 68.7 | 96.59 | 64.92 | 17.84 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-s_udp-body7_210e-256x192-8c9ccbdb_20230504.pth) | -| CSPNeXt-m\* | 256x192 | 17.53 | 3.05 | 73.7 | 97.42 | 68.19 | 15.12 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-256x192-e0c9327b_20230504.pth) | -| CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | 13.96 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | -| CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | 14.04 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | -| CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | 13.05 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | AP
(COCO) | PCK@0.2
(Body8) | AUC
(Body8) | Download | +| :------------: | :--------: | :----------------: | :---------------: | :---------------: | :---------------------: | :-----------------: | :--------------------------------------------------------------------------------: | +| CSPNeXt-tiny\* | 256x192 | 6.03 | 1.43 | 65.9 | 96.34 | 63.80 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth) | +| CSPNeXt-s\* | 256x192 | 8.58 | 1.78 | 68.7 | 96.59 | 64.92 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-s_udp-body7_210e-256x192-8c9ccbdb_20230504.pth) | +| CSPNeXt-m\* | 256x192 | 17.53 | 3.05 | 73.7 | 97.42 | 68.19 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-256x192-e0c9327b_20230504.pth) | +| CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | +| CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | +| CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | @@ -297,12 +301,12 @@ We provide the UDP pretraining configs of the CSPNeXt backbone. Find more detail We also provide the ImageNet classification pre-trained weights of the CSPNeXt backbone. Find more details in [RTMDet](https://github.com/open-mmlab/mmdetection/blob/latest/configs/rtmdet/README.md#classification). 
-| Model | Input Size | Params(M) | Flops(G) | Top-1 (%) | Top-5 (%) | Download | -| :----------: | :--------: | :-------: | :------: | :-------: | :-------: | :---------------------------------------------------------------------------------------------------------------------------------: | -| CSPNeXt-tiny | 224x224 | 2.73 | 0.34 | 69.44 | 89.45 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth) | -| CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | -| CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | -| CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | Top-1 (%) | Top-5 (%) | Download | +| :----------: | :--------: | :----------------: | :---------------: | :-------: | :-------: | :---------------------------------------------------------------------------------------------------------------------------: | +| CSPNeXt-tiny | 224x224 | 2.73 | 0.34 | 69.44 | 89.45 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth) | +| CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | +| CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | +| CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | ## 👀 Visualization [🔝](#-table-of-contents) diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 72d8c08b7b..ee7109944b 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -46,6 +46,10 @@ ______________________________________________________________________ ## 🥳 最新进展 [🔝](#-table-of-contents) +- 2023 年 5 月: + - 添加 [代码示例](./examples/) + - 发布混合数据集训练的 Hand 模型。 + - 发布混合数据集训练的 Body 模型。 - 2023 年 3 月:发布 RTMPose。RTMPose-m 取得 COCO 验证集 75.8 mAP,推理速度达到 430+ FPS 。 ## 📖 简介 [🔝](#-table-of-contents) @@ -151,14 +155,14 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性
AIC+COCO -| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | EPE
(Body8) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :---------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-t](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 68.5 | 91.28 | 63.38 | 19.87 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-aic-coco_pt-aic-coco_420e-256x192-cfc8f33d_20230126.pth) | -| [RTMPose-s](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 72.2 | 92.95 | 66.19 | 17.32 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-aic-coco_pt-aic-coco_420e-256x192-fcb2599b_20230126.pth) | -| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 75.8 | 94.13 | 68.53 | 15.42 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth) | -| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.5 | 94.35 | 68.98 | 15.10 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-256x192-f016ffe0_20230126.pth) | -| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 77.0 | 94.32 | 69.85 | 14.64 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-384x288-a62a0b32_20230228.pth) | -| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 77.3 | 94.54 | 70.14 | 14.30 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-384x288-97d6cb0f_20230228.pth) | +| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :---------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 68.5 | 91.28 | 63.38 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-aic-coco_pt-aic-coco_420e-256x192-cfc8f33d_20230126.pth) | +| [RTMPose-s](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 72.2 | 92.95 | 66.19 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-aic-coco_pt-aic-coco_420e-256x192-fcb2599b_20230126.pth) | +| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 75.8 | 94.13 | 68.53 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth) | +| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.5 | 94.35 | 68.98 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-256x192-f016ffe0_20230126.pth) | +| [RTMPose-m](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 77.0 | 94.32 | 69.85 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-384x288-a62a0b32_20230228.pth) | +| [RTMPose-l](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 77.3 | 94.54 | 70.14 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-aic-coco_pt-aic-coco_420e-384x288-97d6cb0f_20230228.pth) |
@@ -175,14 +179,14 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 - [PoseTrack18](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#posetrack18) - `Body8` 代表除了以上提到的 7 个数据集,再加上 [OCHuman](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#ochuman) 合并后一起进行评测得到的指标。 -| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | EPE
(Body8) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :-----------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 19.45 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | -| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 17.85 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | -| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 15.12 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | -| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 13.79 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | -| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.98 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | -| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 13.08 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | +| Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :-----------------------------------------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 3.34 | 0.36 | 3.20 | 1.06 | 9.02 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | +| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 5.47 | 0.68 | 4.48 | 1.39 | 13.89 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 13.59 | 1.93 | 11.06 | 2.29 | 26.44 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | +| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.72 | 4.33 | 24.78 | 3.66 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | +| 
[RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | @@ -192,25 +196,25 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 - 模型剪枝由 [MMRazor](https://github.com/open-mmlab/mmrazor) 提供 -| Config | Input Size | AP
(COCO) | Params(M) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | ncnn-FP16-Latency(ms)
(Snapdragon 865) | Download | -| :--------------: | :--------: | :---------------: | :-------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | :-----------------: | -| RTMPose-s-aic-coco-pruned | 256x192 | 69.4 | 3.43 | 0.35 | - | - | - | [Model](https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.pth) | +| Config | Input Size | AP
(COCO) | Params
(M) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :-----------------------: | :--------: | :---------------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | +| RTMPose-s-aic-coco-pruned | 256x192 | 69.4 | 3.43 | 0.35 | - | - | - | [Model](https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.pth) | 更多信息,请参考 [GroupFisher Pruning for RTMPose](./rtmpose/pruning/README.md). ### 人体全身 2d 关键点 (133 Keypoints) -| Config | Input Size | Whole AP | Whole AR | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :------------------------------------------- | :--------: | :------: | :------: | :------: | :--------------------------------: | :---------------------------------------: | :---------------------------------------------: | -| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| Config | Input Size | Whole AP | Whole AR | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | +| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | ### 动物 2d 关键点 (17 Keypoints) -| Config | Input Size | AP
(AP10K) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :-----------------------------------------: | :--------: | :----------------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------: | -| [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | +| Config | Input Size | AP
(AP10K) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | +| [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | ### 脸部 2d 关键点 (106 Keypoints) @@ -218,15 +222,15 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 -| Config | Input Size | NME
(LaPa) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :----------------------------------------------------------------------------: | :--------: | :----------------: | :------: | :--------------------------------: | :---------------------------------------: | :---------: | -| [RTMPose-m (试用)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | +| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :-------------------------------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :---------: | +| [RTMPose-m (试用)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | ### 手部 2d 关键点 (21 Keypoints) -| Detection Config | Input Size | Model AP
(OneHand10K) | Flops
(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :------------------------------------: | :--------: | :---------------------------: | :---------------: | :--------------------------------: | :---------------------------------------: | :-----------------------------: | -| [RTMDet-nano (试用)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) | +| Detection Config | Input Size | Model AP
(OneHand10K) | Flops
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | +| [RTMDet-nano (试用)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) |
Hand5 @@ -238,9 +242,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 - [RHD2d](https://lmb.informatik.uni-freiburg.de/resources/datasets/RenderedHandposeDataset.en.html) - [Halpe](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_wholebody_keypoint.html#halpe) -| Config | Input Size | PCK@0.2
(COCO-Wholebody-Hand) | PCK@0.2
(Hand5) | AUC
(Hand5) | EPE
(Hand5) | FLOPS(G) | ORT-Latency(ms)
(i7-11700) | TRT-FP16-Latency(ms)
(GTX 1660Ti) | Download | -| :--------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------: | :---------------------: | :-----------------: | :-----------------: | :------: | :--------------------------------: | :---------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | -| [RTMPose-m\* (试用)](./rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py) | 256x256 | 81.5 | 96.4 | 83.9 | 5.06 | 2.581 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-hand5_pt-aic-coco_210e-256x256-74fb594_20230320.pth) | +| Config | Input Size | PCK@0.2
(COCO-Wholebody-Hand) | PCK@0.2
(Hand5) | AUC
(Hand5) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :----------------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------: | :---------------------: | :-----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-m\*
(试用)](./rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py) | 256x256 | 81.5 | 96.4 | 83.9 | 2.581 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-hand5_pt-aic-coco_210e-256x256-74fb594_20230320.pth) |
@@ -251,12 +255,12 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性
AIC+COCO -| Model | Input Size | Params(M) | Flops(G) | AP
(GT) | AR
(GT) | Download | -| :----------: | :--------: | :-------: | :------: | :-------------: | :-------------: | :-----------------------------------------------------------------------------------------------------------------------------: | -| CSPNeXt-tiny | 256x192 | 6.03 | 1.43 | 65.5 | 68.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-tiny_udp-aic-coco_210e-256x192-cbed682d_20230130.pth) | -| CSPNeXt-s | 256x192 | 8.58 | 1.78 | 70.0 | 73.3 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth) | -| CSPNeXt-m | 256x192 | 17.53 | 3.05 | 74.8 | 77.7 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth) | -| CSPNeXt-l | 256x192 | 32.44 | 5.32 | 77.2 | 79.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-l_udp-aic-coco_210e-256x192-273b7631_20230130.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | AP
(GT) | AR
(GT) | Download | +| :----------: | :--------: | :----------------: | :---------------: | :-------------: | :-------------: | :---------------------------------------------------------------------------------------------------------------: | +| CSPNeXt-tiny | 256x192 | 6.03 | 1.43 | 65.5 | 68.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-tiny_udp-aic-coco_210e-256x192-cbed682d_20230130.pth) | +| CSPNeXt-s | 256x192 | 8.58 | 1.78 | 70.0 | 73.3 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth) | +| CSPNeXt-m | 256x192 | 17.53 | 3.05 | 74.8 | 77.7 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth) | +| CSPNeXt-l | 256x192 | 32.44 | 5.32 | 77.2 | 79.9 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmpose/cspnext-l_udp-aic-coco_210e-256x192-273b7631_20230130.pth) |
@@ -273,14 +277,14 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 - [PoseTrack18](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#posetrack18) - `Body8` 代表除了以上提到的 7 个数据集,再加上 [OCHuman](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#ochuman) 合并后一起进行评测得到的指标。 -| Model | Input Size | Params(M) | Flops(G) | AP
(COCO) | PCK@0.2
(Body8) | AUC
(Body8) | EPE
(Body8) | Download | -| :------------: | :--------: | :-------: | :------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------------------------------------------------------------------------------: | -| CSPNeXt-tiny\* | 256x192 | 6.03 | 1.43 | 65.9 | 96.34 | 63.80 | 18.63 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth) | -| CSPNeXt-s\* | 256x192 | 8.58 | 1.78 | 68.7 | 96.59 | 64.92 | 17.84 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-s_udp-body7_210e-256x192-8c9ccbdb_20230504.pth) | -| CSPNeXt-m\* | 256x192 | 17.53 | 3.05 | 73.7 | 97.42 | 68.19 | 15.12 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-256x192-e0c9327b_20230504.pth) | -| CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | 13.96 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | -| CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | 14.04 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | -| CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | 13.05 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | AP
(COCO) | PCK@0.2
(Body8) | AUC
(Body8) | Download | +| :------------: | :--------: | :----------------: | :---------------: | :---------------: | :---------------------: | :-----------------: | :--------------------------------------------------------------------------------: | +| CSPNeXt-tiny\* | 256x192 | 6.03 | 1.43 | 65.9 | 96.34 | 63.80 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth) | +| CSPNeXt-s\* | 256x192 | 8.58 | 1.78 | 68.7 | 96.59 | 64.92 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-s_udp-body7_210e-256x192-8c9ccbdb_20230504.pth) | +| CSPNeXt-m\* | 256x192 | 17.53 | 3.05 | 73.7 | 97.42 | 68.19 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-256x192-e0c9327b_20230504.pth) | +| CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | +| CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | +| CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | @@ -288,12 +292,12 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 我们提供了 ImageNet 分类训练的 CSPNeXt 模型参数,更多细节请参考 [RTMDet](https://github.com/open-mmlab/mmdetection/blob/latest/configs/rtmdet/README.md#classification)。 -| Model | Input Size | Params(M) | Flops(G) | Top-1 (%) | Top-5 (%) | Download | -| :----------: | :--------: | :-------: | :------: | :-------: | :-------: | :---------------------------------------------------------------------------------------------------------------------------------: | -| CSPNeXt-tiny | 224x224 | 2.73 | 0.34 | 69.44 | 89.45 | 
[Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth) | -| CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | -| CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | -| CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | +| Model | Input Size | Params
(M) | Flops
(G) | Top-1 (%) | Top-5 (%) | Download | +| :----------: | :--------: | :----------------: | :---------------: | :-------: | :-------: | :---------------------------------------------------------------------------------------------------------------------------: | +| CSPNeXt-tiny | 224x224 | 2.73 | 0.34 | 69.44 | 89.45 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth) | +| CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | +| CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | +| CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | ## 👀 可视化 [🔝](#-table-of-contents) diff --git a/projects/rtmpose/examples/PoseTracker-Android-Prototype/README.md b/projects/rtmpose/examples/PoseTracker-Android-Prototype/README.md new file mode 100644 index 0000000000..edce803106 --- /dev/null +++ b/projects/rtmpose/examples/PoseTracker-Android-Prototype/README.md @@ -0,0 +1,5 @@ +# PoseTracker-Android-Prototype + +PoseTracker Android Demo Prototype, which is based on [mmdeploy](https://github.com/open-mmlab/mmdeploy/tree/dev-1.x) + +Please refer to [Original Repository](https://github.com/hanrui1sensetime/PoseTracker-Android-Prototype). diff --git a/projects/rtmpose/examples/README.md b/projects/rtmpose/examples/README.md index d79dd852a4..e1e7ae72f8 100644 --- a/projects/rtmpose/examples/README.md +++ b/projects/rtmpose/examples/README.md @@ -9,3 +9,9 @@ RTMPose-Deploy is a C++ code example that does not use MMDEPLOY for RTMPose loca ### 2. 
RTMPose inference with ONNXRuntime (Python) This example shows how to run RTMPose inference with ONNXRuntime in Python. + +### 3. PoseTracker Android Demo + +PoseTracker Android Demo Prototype based on mmdeploy. + +- [Original Repository](https://github.com/hanrui1sensetime/PoseTracker-Android-Prototype) From 827483718ba89aacb6640b0417ec2ea031e58699 Mon Sep 17 00:00:00 2001 From: Tau Date: Thu, 25 May 2023 01:41:47 +0800 Subject: [PATCH 06/52] [Docs] Update RTMPose Docs (#2394) --- mmpose/models/heads/coord_cls_heads/rtmcc_head.py | 4 ++-- mmpose/models/heads/coord_cls_heads/simcc_head.py | 6 ++++-- projects/rtmpose/README.md | 7 ++++++- projects/rtmpose/README_CN.md | 7 ++++++- 4 files changed, 18 insertions(+), 6 deletions(-) diff --git a/mmpose/models/heads/coord_cls_heads/rtmcc_head.py b/mmpose/models/heads/coord_cls_heads/rtmcc_head.py index 94d613192c..5df0733c48 100644 --- a/mmpose/models/heads/coord_cls_heads/rtmcc_head.py +++ b/mmpose/models/heads/coord_cls_heads/rtmcc_head.py @@ -134,8 +134,8 @@ def __init__( def forward(self, feats: Tuple[Tensor]) -> Tuple[Tensor, Tensor]: """Forward the network. - The input is multi scale feature maps and the - output is the heatmap. + The input is the featuremap extracted by backbone and the + output is the simcc representation. Args: feats (Tuple[Tensor]): Multi scale feature maps. diff --git a/mmpose/models/heads/coord_cls_heads/simcc_head.py b/mmpose/models/heads/coord_cls_heads/simcc_head.py index b9287b7204..d9e7001cbc 100644 --- a/mmpose/models/heads/coord_cls_heads/simcc_head.py +++ b/mmpose/models/heads/coord_cls_heads/simcc_head.py @@ -198,8 +198,10 @@ def _make_deconv_head( return deconv_head def forward(self, feats: Tuple[Tensor]) -> Tuple[Tensor, Tensor]: - """Forward the network. The input is multi scale feature maps and the - output is the heatmap. + """Forward the network. + + The input is the featuremap extracted by backbone and the + output is the simcc representation. 
Args: feats (Tuple[Tensor]): Multi scale feature maps. diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index fe30d5d93e..cc2e300e6c 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -320,7 +320,7 @@ We also provide the ImageNet classification pre-trained weights of the CSPNeXt b We provide two appoaches to try RTMPose: - MMPose demo scripts -- Pre-compiled MMDeploy SDK (Recommended) +- Pre-compiled MMDeploy SDK (Recommend, 6-10 times faster) ### MMPose demo scripts @@ -364,6 +364,8 @@ MMDeploy provides a precompiled SDK for Pipeline reasoning on RTMPose projects, - All models must by exported by `tools/deploy.py` before PoseTracker can be used for inference. - For the tutorial of exporting the SDK version model, see [SDK Reasoning](#%EF%B8%8F-step3-inference-with-sdk), and for detailed parameter settings of inference, see [Pipeline Reasoning](#-step4-pipeline-inference). +- Exported SDK models (ONNX, TRT, ncnn, etc.) can be downloaded from [OpenMMLab Deploee](https://platform.openmmlab.com/deploee). +- You can also convert `.pth` models into SDK [online](https://platform.openmmlab.com/deploee/task-convert-list). #### Linux @@ -597,6 +599,9 @@ Please refer to [Train and Test](https://mmpose.readthedocs.io/en/latest/user_gu Here is a basic example of deploy RTMPose with [MMDeploy](https://github.com/open-mmlab/mmdeploy/tree/main). +- Exported SDK models (ONNX, TRT, ncnn, etc.) can be downloaded from [OpenMMLab Deploee](https://platform.openmmlab.com/deploee). +- You can also convert `.pth` models into SDK [online](https://platform.openmmlab.com/deploee/task-convert-list). + ### 🧩 Step1. Install MMDeploy Before starting the deployment, please make sure you install MMPose and MMDeploy correctly. 
diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index ee7109944b..6f2da2e662 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -311,7 +311,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 我们提供了两种途径来让用户尝试 RTMPose 模型: - MMPose demo 脚本 -- MMDeploy SDK 预编译包 (推荐) +- MMDeploy SDK 预编译包 (推荐,速度提升6-10倍) ### MMPose demo 脚本 @@ -357,6 +357,8 @@ MMDeploy 提供了预编译的 SDK,用于对 RTMPose 项目进行 Pipeline 推 - 所有的模型必须经过 `tools/deploy.py` 导出后才能使用 PoseTracker 进行推理。 - 导出 SDK 版模型的教程见 [SDK 推理](#%EF%B8%8F-sdk-推理),推理的详细参数设置见 [Pipeline 推理](#-pipeline-推理)。 +- 你可以从 [硬件模型库](https://platform.openmmlab.com/deploee) 直接下载 SDK 版模型(ONNX、 TRT、ncnn 等)。 +- 同时我们也支持 [在线模型转换](https://platform.openmmlab.com/deploee/task-convert-list)。 #### Linux\\ @@ -588,6 +590,9 @@ example\cpp\build\Release 本教程将展示如何通过 [MMDeploy](https://github.com/open-mmlab/mmdeploy/tree/main) 部署 RTMPose 项目。 +- 你可以从 [硬件模型库](https://platform.openmmlab.com/deploee) 直接下载 SDK 版模型(ONNX、 TRT、ncnn 等)。 +- 同时我们也支持 [在线模型转换](https://platform.openmmlab.com/deploee/task-convert-list)。 + ### 🧩 安装 在开始部署之前,首先你需要确保正确安装了 MMPose, MMDetection, MMDeploy,相关安装教程如下: From 9fbc320622c5ce8f6eb4590b4fd854be8d2226bd Mon Sep 17 00:00:00 2001 From: Tau Date: Thu, 25 May 2023 18:44:06 +0800 Subject: [PATCH 07/52] [Fix] Fix ncnn bugs caused by negative axis (#2399) --- mmpose/models/utils/rtmcc_block.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/mmpose/models/utils/rtmcc_block.py b/mmpose/models/utils/rtmcc_block.py index 0e317376b2..bd4929454c 100644 --- a/mmpose/models/utils/rtmcc_block.py +++ b/mmpose/models/utils/rtmcc_block.py @@ -105,7 +105,7 @@ def forward(self, x): torch.Tensor: The tensor after applying scale norm. 
""" - norm = torch.norm(x, dim=-1, keepdim=True) * self.scale + norm = torch.norm(x, dim=2, keepdim=True) * self.scale return x / norm.clamp(min=self.eps) * self.g @@ -243,29 +243,34 @@ def _forward(self, inputs): x = self.ln(x) + # [B, K, in_token_dims] -> [B, K, e + e + s] uv = self.uv(x) + uv = self.act_fn(uv) if self.attn_type == 'self-attn': - u, v, base = torch.split( - self.act_fn(uv), [self.e, self.e, self.s], dim=-1) - + # [B, K, e + e + s] -> [B, K, e], [B, K, e], [B, K, s] + u, v, base = torch.split(uv, [self.e, self.e, self.s], dim=2) + # [B, K, 1, s] * [1, 1, 2, s] + [2, s] -> [B, K, 2, s] base = base.unsqueeze(2) * self.gamma[None, None, :] + self.beta if self.pos_enc: base = rope(base, dim=1) - - q, k = torch.unbind(base, dim=-2) + # [B, K, 2, s] -> [B, K, s], [B, K, s] + q, k = torch.unbind(base, dim=2) else: - u, q = torch.split(self.act_fn(uv), [self.e, self.s], dim=-1) + # [B, K, e + s] -> [B, K, e], [B, K, s] + u, q = torch.split(uv, [self.e, self.s], dim=2) - k = self.k_fc(k) - v = self.v_fc(v) + k = self.k_fc(k) # -> [B, K, s] + v = self.v_fc(v) # -> [B, K, e] if self.pos_enc: q = rope(q, 1) k = rope(k, 1) + # [B, K, s].permute() -> [B, s, K] + # [B, K, s] x [B, s, K] -> [B, K, K] qk = torch.bmm(q, k.permute(0, 2, 1)) if self.use_rel_bias: @@ -274,13 +279,14 @@ def _forward(self, inputs): else: bias = self.rel_pos_bias(q.size(1), k.size(1)) qk += bias[:, :q.size(1), :k.size(1)] - + # [B, K, K] kernel = torch.square(F.relu(qk / self.sqrt_s)) if self.dropout_rate > 0.: kernel = self.dropout(kernel) - + # [B, K, K] x [B, K, e] -> [B, K, e] x = u * torch.bmm(kernel, v) + # [B, K, e] -> [B, K, out_token_dims] x = self.o(x) return x From 3c486f281839a541d9c6f1399d273d4ccaab8306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=B0=B8=E9=9F=AC?= <53283758+Dominic23331@users.noreply.github.com> Date: Fri, 26 May 2023 19:01:22 +0800 Subject: [PATCH 08/52] [Feature] RTMPose TensorRT Deployment - C++ Pre and Post-processing Code Example (#2397) --- 
.../topdown_heatmap/README.md | 14 +- projects/rtmpose/examples/README.md | 7 +- .../rtmpose/examples/RTMPose-Deploy/README.md | 9 +- .../examples/RTMPose-Deploy/README_CN.md | 9 +- .../RTMPose-Deploy/Windows/TensorRT/README.md | 73 ++++++ .../Windows/TensorRT/python/convert_rtmdet.py | 115 ++++++++++ .../src/RTMPoseTensorRT/inference.cpp | 38 ++++ .../TensorRT/src/RTMPoseTensorRT/inference.h | 14 ++ .../TensorRT/src/RTMPoseTensorRT/main.cpp | 63 ++++++ .../TensorRT/src/RTMPoseTensorRT/rtmdet.cpp | 198 ++++++++++++++++ .../TensorRT/src/RTMPoseTensorRT/rtmdet.h | 40 ++++ .../TensorRT/src/RTMPoseTensorRT/rtmpose.cpp | 193 ++++++++++++++++ .../TensorRT/src/RTMPoseTensorRT/rtmpose.h | 43 ++++ .../TensorRT/src/RTMPoseTensorRT/utils.cpp | 212 ++++++++++++++++++ .../TensorRT/src/RTMPoseTensorRT/utils.h | 56 +++++ 15 files changed, 1068 insertions(+), 16 deletions(-) create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/README.md create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/python/convert_rtmdet.py create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.cpp create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.h create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/main.cpp create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.cpp create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.h create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.cpp create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.h create mode 100644 projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.cpp create mode 100644 
projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.h diff --git a/configs/animal_2d_keypoint/topdown_heatmap/README.md b/configs/animal_2d_keypoint/topdown_heatmap/README.md index e799273b8b..90a440dc28 100644 --- a/configs/animal_2d_keypoint/topdown_heatmap/README.md +++ b/configs/animal_2d_keypoint/topdown_heatmap/README.md @@ -59,10 +59,10 @@ Results on AnimalKingdom test set | Model | Input Size | class | PCK(0.05) | Details and Download | | :-------: | :--------: | :-----------: | :-------: | :---------------------------------------------------: | -| HRNet-w32 | 256x256 | P1 | 0.6272 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P2 | 0.3774 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P3_mammals | 0.5756 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P3_amphibians | 0.5356 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P3_reptiles | 0.5 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P3_birds | 0.7679 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | -| HRNet-w32 | 256x256 | P3_fishes | 0.636 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P1 | 0.6323 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P2 | 0.3741 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_mammals | 0.571 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_amphibians | 0.5358 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_reptiles | 0.51 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_birds | 0.7671 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | +| HRNet-w32 | 256x256 | P3_fishes | 0.6406 | [hrnet_animalkingdom.md](./ak/hrnet_animalkingdom.md) | 
diff --git a/projects/rtmpose/examples/README.md b/projects/rtmpose/examples/README.md index e1e7ae72f8..5846f039e7 100644 --- a/projects/rtmpose/examples/README.md +++ b/projects/rtmpose/examples/README.md @@ -1,10 +1,11 @@ ## List of examples -### 1. RTMPose-Deploy (without MMDeploy) +### 1. RTMPose-Deploy -RTMPose-Deploy is a C++ code example that does not use MMDEPLOY for RTMPose localized deployment. +RTMPose-Deploy is a C++ code example for RTMPose localized deployment. -- [Original Repository](https://github.com/HW140701/RTMPose-Deploy) +- [ONNXRuntime-CPU](https://github.com/HW140701/RTMPose-Deploy) +- [TensorRT](https://github.com/Dominic23331/rtmpose_tensorrt) ### 2. RTMPose inference with ONNXRuntime (Python) diff --git a/projects/rtmpose/examples/RTMPose-Deploy/README.md b/projects/rtmpose/examples/RTMPose-Deploy/README.md index 44da240da4..c4fce9a4df 100644 --- a/projects/rtmpose/examples/RTMPose-Deploy/README.md +++ b/projects/rtmpose/examples/RTMPose-Deploy/README.md @@ -2,8 +2,11 @@ [中文说明](./README_CN.md) -RTMPose-Deploy is a C ++ code example that does **NOT** use MMDEPLOY for RTMPose localized deployment. +RTMPose-Deploy is a C ++ code example for RTMPose localized deployment. -At present, RTMPose-Deploy has completed the deployment of RTMDetnano and RTMPose on the Windows system. This example only contains the source code. If you want a complete project example, please refer to:[https://github.com/HW140701/RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy) . This project provides a complete VS2019 project and release package. +At present, RTMPose-Deploy has completed to use ONNXRuntime-CPU and TensorRT to deploy the RTMDet and RTMPose on the Windows system. -Subsequent will consider adding the use of C ++ Tensorrt SDK on the Windows system to deploy RTMDet-nano and RTMPose. 
+| Deployment Framework | Repo | +| -------------------- | -------------------------------------------------------------------- | +| ONNXRuntime-CPU | [RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy) | +| TensorRT | [rtmpose_tensorrt](https://github.com/Dominic23331/rtmpose_tensorrt) | diff --git a/projects/rtmpose/examples/RTMPose-Deploy/README_CN.md b/projects/rtmpose/examples/RTMPose-Deploy/README_CN.md index 3469108640..82ee093658 100644 --- a/projects/rtmpose/examples/RTMPose-Deploy/README_CN.md +++ b/projects/rtmpose/examples/RTMPose-Deploy/README_CN.md @@ -1,7 +1,10 @@ # RTMPose-Deploy -RTMPose-Deploy 是一个**不使用** mmdeploy 进行 RTMPose 本地化部署的 C++ 代码示例。 +RTMPose-Deploy 是一个进行 RTMPose 本地化部署的 C++ 代码示例。 -目前,RTMPose-Deploy 已完成在 Windows 系统上使用 OnnxRuntime CPU 对 RTMDet-nano 和 RTMPose 完成了部署。本示例只包含了源代码,如果你想要完整的项目示例可以参考:[https://github.com/HW140701/RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy),这个仓库提供了完整的 VS2019 工程和发布包。 +目前,RTMPose-Deploy 已完成在 Windows 系统上使用 OnnxRuntime CPU 和TensorRT 对 RTMDet 和 RTMPose 完成了部署。 -后续会考虑添加在 Windows 系统上使用 C++ TensorRT SDK 对 RTMDet-nano 和 RTMPose 进行部署。 +| 部署框架 | 仓库 | +| --------------- | -------------------------------------------------------------------- | +| ONNXRuntime-CPU | [RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy) | +| TensorRT | [rtmpose_tensorrt](https://github.com/Dominic23331/rtmpose_tensorrt) | diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/README.md b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/README.md new file mode 100644 index 0000000000..c9615d89d3 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/README.md @@ -0,0 +1,73 @@ +# rtmpose_tensorrt + +## Description + +This repository is use the TensorRT to deploy RTMDet and RTMPose. 
Your computer should have these components: + +- NVIDIA GPU +- CUDA +- cudnn +- TensorRT 8.x +- OPENCV +- VS2019 + +The effect of the code is as follows: + +![mabaoguo](https://github.com/Dominic23331/rtmpose_tensorrt/assets/53283758/568563be-a31d-4d03-9629-842dad3745e2) + +## Get Started + +### I. Convert Model + +#### 1. RTMDet + +When you start to convert a RTMDet model, you can use **convert_rtmdet.py** to convert pth file to onnx. + +```shell +python convert_rtmdet.py --config --checkpoint --output +``` + +Note that RTMDet should be the mmdetection version, and the conversion of mmyolo is not supported. + +#### 2. RTMPose + +You can use mmdeploy to convert RTMPose. The mmdeploy config file should use **configs/mmpose/pose-detection_simcc_onnxruntime_dynamic.py**. The convert command as follow: + +```shell +python tools/deploy.py +``` + +#### 3. Convert to TensorRT engine file + +You can use trtexec to convert an ONNX file to engine file. The command as follow: + +``` +trtexec --onnx= --saveEngine= +``` + +**Note that the engine files included in the project are only for storing examples. As the engine files generated by TensorRT are related to hardware, it is necessary to regenerate the engine files on the computer where the code needs to be run.** + +### II. Run + +At first, you should fill in the model locations for RTMDet and RTMPose as follows: + +```c++ +// set engine file path +string detEngineFile = "./model/rtmdet.engine"; +string poseEngineFile = "./model/rtmpose_m.engine"; +``` + +Then, you can set the cap to video file or camera. + +``` +// open cap +cv::VideoCapture cap(0); +``` + +If you want to change iou threshold or confidence threshold, you can change them when you initialize RTMDet model. + +``` +RTMDet det_model(detEngineFile, logger, 0.5, 0.65); +``` + +Finally, you can run the **main.cpp** file to get result. 
diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/python/convert_rtmdet.py b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/python/convert_rtmdet.py new file mode 100644 index 0000000000..81196413dd --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/python/convert_rtmdet.py @@ -0,0 +1,115 @@ +import argparse + +import torch +import torch.nn.functional as F +from mmdet.apis import init_detector +from torch import nn + + +def build_model_from_cfg(config_path: str, checkpoint_path: str, device): + model = init_detector(config_path, checkpoint_path, device=device) + model.eval() + return model + + +class RTMDet(nn.Module): + """Load RTMDet model and add postprocess. + + Args: + model (nn.Module): The RTMDet model. + """ + + def __init__(self, model: nn.Module) -> None: + super().__init__() + self.model = model + self.stage = [80, 40, 20] + self.input_shape = 640 + + def forward(self, inputs): + """model forward function.""" + boxes = [] + neck_outputs = self.model(inputs) + for i, (cls, box) in enumerate(zip(*neck_outputs)): + cls = cls.permute(0, 2, 3, 1) + box = box.permute(0, 2, 3, 1) + box = self.decode(box, cls, i) + boxes.append(box) + result_box = torch.cat(boxes, dim=1) + return result_box + + def decode(self, box: torch.Tensor, cls: torch.Tensor, stage: int): + """RTMDet postprocess function. + + Args: + box (torch.Tensor): output boxes. + cls (torch.Tensor): output cls. + stage (int): RTMDet output stage. + + Returns: + torch.Tensor: The decode boxes. 
+ Format is [x1, y1, x2, y2, class, confidence] + """ + cls = F.sigmoid(cls) + conf = torch.max(cls, dim=3, keepdim=True)[0] + cls = torch.argmax(cls, dim=3, keepdim=True).to(torch.float32) + + box = torch.cat([box, cls, conf], dim=-1) + + step = self.input_shape // self.stage[stage] + + block_step = torch.linspace( + 0, self.stage[stage] - 1, steps=self.stage[stage], + device='cuda') * step + block_x = torch.broadcast_to(block_step, + [self.stage[stage], self.stage[stage]]) + block_y = torch.transpose(block_x, 1, 0) + block_x = torch.unsqueeze(block_x, 0) + block_y = torch.unsqueeze(block_y, 0) + block = torch.stack([block_x, block_y], -1) + + box[..., :2] = block - box[..., :2] + box[..., 2:4] = block + box[..., 2:4] + box = box.reshape(1, -1, 6) + return box + + +def parse_args(): + parser = argparse.ArgumentParser( + description='convert rtmdet model to ONNX.') + parser.add_argument( + '--config', type=str, help='rtmdet config file path from mmdetection.') + parser.add_argument( + '--checkpoint', + type=str, + help='rtmdet checkpoint path from mmdetection.') + parser.add_argument('--output', type=str, help='output filename.') + parser.add_argument( + '--device', + type=str, + default='cuda:0', + help='Device used for inference') + parser.add_argument( + '--input-name', type=str, default='image', help='ONNX input name.') + parser.add_argument( + '--output-name', type=str, default='output', help='ONNX output name.') + parser.add_argument( + '--opset', type=int, default=11, help='ONNX opset version.') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + + model = build_model_from_cfg(args.config, args.checkpoint, args.device) + rtmdet = RTMDet(model) + rtmdet.eval() + x = torch.randn((1, 3, 640, 640), device=args.device) + + torch.onnx.export( + rtmdet, + x, + args.output, + input_names=[args.input_name], + output_names=[args.output_name], + opset_version=args.opset) diff --git 
a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.cpp b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.cpp new file mode 100644 index 0000000000..bc4e8449a7 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.cpp @@ -0,0 +1,38 @@ +#include "inference.h" + + +/** + * @brief Inference network + * @param image Input image + * @param detect_model RTMDet model + * @param pose_model RTMPose model + * @return Inference result +*/ +std::vector> inference(cv::Mat& image, RTMDet& detect_model, RTMPose& pose_model) +{ + cv::Mat im0; + image.copyTo(im0); + + // inference detection model + std::vector det_result = detect_model.predict(image); + std::vector> result; + for (int i = 0; i < det_result.size(); i++) + { + // Select the detection box labeled as human + if (!isEqual(det_result[i].cls, 0.0)) + continue; + + // cut image to input the pose model + cv::Mat person_image = img_cut(im0, det_result[i].x1, det_result[i].y1, det_result[i].x2, det_result[i].y2); + std::vector pose_result = pose_model.predict(person_image); + + // Restore points to original image + for (int j = 0; j < pose_result.size(); j++) + { + pose_result[j].x += det_result[i].x1; + pose_result[j].y += det_result[i].y1; + } + result.push_back(pose_result); + } + return result; +} diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.h b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.h new file mode 100644 index 0000000000..8f603ffc1c --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/inference.h @@ -0,0 +1,14 @@ +#pragma once +#include +#include + +#include +#include + +#include "rtmdet.h" +#include "rtmpose.h" +#include "utils.h" + + + +std::vector> inference(cv::Mat& image, RTMDet& detect_model, RTMPose& pose_model); diff --git 
a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/main.cpp b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/main.cpp new file mode 100644 index 0000000000..3799bca896 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/main.cpp @@ -0,0 +1,63 @@ +#include +#include +#include +#include +#include + +#include "rtmdet.h" +#include "rtmpose.h" +#include "utils.h" +#include "inference.h" + + +using namespace std; + +/** + * @brief Setting up Tensorrt logger +*/ +class Logger : public nvinfer1::ILogger +{ + void log(Severity severity, const char* msg) noexcept override + { + // Only output logs with severity greater than warning + if (severity <= Severity::kWARNING) + std::cout << msg << std::endl; + } +}logger; + + +int main() +{ + // set engine file path + string detEngineFile = "./model/rtmdet.engine"; + string poseEngineFile = "./model/rtmpose_m.engine"; + + // init model + RTMDet det_model(detEngineFile, logger); + RTMPose pose_model(poseEngineFile, logger); + + // open cap + cv::VideoCapture cap(0); + + while (cap.isOpened()) + { + cv::Mat frame; + cv::Mat show_frame; + cap >> frame; + + if (frame.empty()) + break; + + frame.copyTo(show_frame); + auto result = inference(frame, det_model, pose_model); + draw_pose(show_frame, result); + + cv::imshow("result", show_frame); + if (cv::waitKey(1) == 'q') + break; + } + cv::destroyAllWindows(); + cap.release(); + + return 0; +} diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.cpp b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.cpp new file mode 100644 index 0000000000..abc8ebd32d --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.cpp @@ -0,0 +1,198 @@ +#include "rtmdet.h" + + +// set network params +float RTMDet::input_h = 640; +float RTMDet::input_w = 640; +float RTMDet::mean[3] 
= { 123.675, 116.28, 103.53 }; +float RTMDet::std[3] = { 58.395, 57.12, 57.375 }; + +/** + * @brief RTMDet`s constructor + * @param model_path RTMDet engine file path + * @param logger Nvinfer ILogger + * @param conf_thre The confidence threshold + * @param iou_thre The iou threshold of nms +*/ +RTMDet::RTMDet(std::string model_path, nvinfer1::ILogger& logger, float conf_thre, float iou_thre) : conf_thre(conf_thre), iou_thre(iou_thre) +{ + // read the engine file + std::ifstream engineStream(model_path, std::ios::binary); + engineStream.seekg(0, std::ios::end); + const size_t modelSize = engineStream.tellg(); + engineStream.seekg(0, std::ios::beg); + std::unique_ptr engineData(new char[modelSize]); + engineStream.read(engineData.get(), modelSize); + engineStream.close(); + + // create tensorrt model + runtime = nvinfer1::createInferRuntime(logger); + engine = runtime->deserializeCudaEngine(engineData.get(), modelSize); + context = engine->createExecutionContext(); + + // Define input dimensions + context->setBindingDimensions(0, nvinfer1::Dims4(1, 3, input_h, input_w)); + + // create CUDA stream + cudaStreamCreate(&stream); + + // Initialize offset + offset.push_back(0); + offset.push_back(0); +} + + +/** + * @brief RTMDet`s destructor +*/ +RTMDet::~RTMDet() +{ + cudaFree(stream); + cudaFree(buffer[0]); + cudaFree(buffer[1]); +} + + +/** + * @brief Display network input and output parameters +*/ +void RTMDet::show() +{ + for (int i = 0; i < engine->getNbBindings(); i++) + { + std::cout << "node: " << engine->getBindingName(i) << ", "; + if (engine->bindingIsInput(i)) + { + std::cout << "type: input" << ", "; + } + else + { + std::cout << "type: output" << ", "; + } + nvinfer1::Dims dim = engine->getBindingDimensions(i); + std::cout << "dimensions: "; + for (int d = 0; d < dim.nbDims; d++) + { + std::cout << dim.d[d] << " "; + } + std::cout << "\n"; + } +} + + +/** + * @brief Network preprocessing function + * @param image Input image + * @return Processed Tensor 
+*/ +std::vector RTMDet::preprocess(cv::Mat& image) +{ + // resize image + std::tuple resized = resize(image, input_w, input_h); + cv::Mat resized_image = std::get<0>(resized); + offset[0] = std::get<1>(resized); + offset[1] = std::get<2>(resized); + + // BGR2RGB + cv::cvtColor(resized_image, resized_image, cv::COLOR_BGR2RGB); + + // subtract mean and divide variance + std::vector input_tensor; + for (int k = 0; k < 3; k++) + { + for (int i = 0; i < resized_image.rows; i++) + { + for (int j = 0; j < resized_image.cols; j++) + { + input_tensor.emplace_back(((float)resized_image.at(i, j)[k] - mean[k]) / std[k]); + } + } + } + + return input_tensor; +} + + +/** + * @brief Network post-processing function + * @param boxes_result The result of rtmdet + * @param img_w The width of input image + * @param img_h The height of input image + * @return Detect boxes +*/ +std::vector RTMDet::postprocess(std::vector boxes_result, int img_w, int img_h) +{ + std::vector result; + std::vector buff; + for (int i = 0; i < 8400; i++) + { + // x1, y1, x2, y2, class, confidence + buff.insert(buff.end(), boxes_result.begin() + i * 6, boxes_result.begin() + i * 6 + 6); + // drop the box which confidence less than threshold + if (buff[5] < conf_thre) + { + buff.clear(); + continue; + } + + Box box; + box.x1 = buff[0]; + box.y1 = buff[1]; + box.x2 = buff[2]; + box.y2 = buff[3]; + box.cls = buff[4]; + box.conf = buff[5]; + result.emplace_back(box); + buff.clear(); + } + + // nms + result = non_maximum_suppression(result, iou_thre); + + // return the box to real image + for (int i = 0; i < result.size(); i++) + { + result[i].x1 = MAX((result[i].x1 - offset[0]) * img_w / (input_w - 2 * offset[0]), 0); + result[i].y1 = MAX((result[i].y1 - offset[1]) * img_h / (input_h - 2 * offset[1]), 0); + result[i].x2 = MIN((result[i].x2 - offset[0]) * img_w / (input_w - 2 * offset[0]), img_w); + result[i].y2 = MIN((result[i].y2 - offset[1]) * img_h / (input_h - 2 * offset[1]), img_h); + } + + return result; 
+} + + +/** + * @brief Predict function + * @param image Input image + * @return Predict results +*/ +std::vector RTMDet::predict(cv::Mat& image) +{ + // get input image size + int img_w = image.cols; + int img_h = image.rows; + std::vector input = preprocess(image); + + // apply for GPU space + cudaMalloc(&buffer[0], 3 * input_h * input_w * sizeof(float)); + cudaMalloc(&buffer[1], 8400 * 6 * sizeof(float)); + + // copy data to GPU + cudaMemcpyAsync(buffer[0], input.data(), 3 * input_h * input_w * sizeof(float), cudaMemcpyHostToDevice, stream); + + // network inference + context->enqueueV2(buffer, stream, nullptr); + cudaStreamSynchronize(stream); + + // get result from GPU + std::vector boxes_result(8400 * 6); + cudaMemcpyAsync(boxes_result.data(), buffer[1], 8400 * 6 * sizeof(float), cudaMemcpyDeviceToHost); + + std::vector result = postprocess(boxes_result, img_w, img_h); + + cudaFree(buffer[0]); + cudaFree(buffer[1]); + + return result; +} diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.h b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.h new file mode 100644 index 0000000000..7a30a9d48e --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmdet.h @@ -0,0 +1,40 @@ +#pragma once +#include +#include +#include +#include +#include +#include + +#include "utils.h" + + + +class RTMDet +{ +public: + RTMDet(std::string model_path, nvinfer1::ILogger& logger, float conf_thre=0.5, float iou_thre=0.65); + void show(); + std::vector predict(cv::Mat& image); + ~RTMDet(); + +private: + static float input_w; + static float input_h; + static float mean[3]; + static float std[3]; + + float conf_thre; + float iou_thre; + std::vector offset; + + nvinfer1::IRuntime* runtime; + nvinfer1::ICudaEngine* engine; + nvinfer1::IExecutionContext* context; + + void* buffer[2]; + cudaStream_t stream; + + std::vector preprocess(cv::Mat& image); + std::vector 
postprocess(std::vector boxes_result, int img_w, int img_h); +}; diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.cpp b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.cpp new file mode 100644 index 0000000000..1a190ceda2 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.cpp @@ -0,0 +1,193 @@ +#include "rtmpose.h" + + +// set network params +float RTMPose::input_h = 256; +float RTMPose::input_w = 192; +int RTMPose::extend_width = 384; +int RTMPose::extend_height = 512; +int RTMPose::num_points = 17; +float RTMPose::mean[3] = { 123.675, 116.28, 103.53 }; +float RTMPose::std[3] = { 58.395, 57.12, 57.375 }; + +/** + * @brief RTMPose`s constructor + * @param model_path RTMPose engine file path + * @param logger Nvinfer ILogger +*/ +RTMPose::RTMPose(std::string model_path, nvinfer1::ILogger& logger) +{ + // read the engine file + std::ifstream engineStream(model_path, std::ios::binary); + engineStream.seekg(0, std::ios::end); + const size_t modelSize = engineStream.tellg(); + engineStream.seekg(0, std::ios::beg); + std::unique_ptr engineData(new char[modelSize]); + engineStream.read(engineData.get(), modelSize); + engineStream.close(); + + // create tensorrt model + runtime = nvinfer1::createInferRuntime(logger); + engine = runtime->deserializeCudaEngine(engineData.get(), modelSize); + context = engine->createExecutionContext(); + + // Define input dimensions + context->setBindingDimensions(0, nvinfer1::Dims4(1, 3, input_h, input_w)); + + // create CUDA stream + cudaStreamCreate(&stream); + + // Initialize offset + offset.push_back(0); + offset.push_back(0); +} + +/** + * @brief RTMPose`s destructor +*/ +RTMPose::~RTMPose() +{ + cudaFree(stream); + cudaFree(buffer[0]); + cudaFree(buffer[1]); + cudaFree(buffer[2]); +} + + +/** + * @brief Display network input and output parameters +*/ +void RTMPose::show() +{ + for (int i = 0; i 
< engine->getNbBindings(); i++) + { + std::cout << "node: " << engine->getBindingName(i) << ", "; + if (engine->bindingIsInput(i)) + { + std::cout << "type: input" << ", "; + } + else + { + std::cout << "type: output" << ", "; + } + nvinfer1::Dims dim = engine->getBindingDimensions(i); + std::cout << "dimensions: "; + for (int d = 0; d < dim.nbDims; d++) + { + std::cout << dim.d[d] << " "; + } + std::cout << "\n"; + } +} + + +/** + * @brief Network preprocessing function + * @param image Input image + * @return Processed Tensor +*/ +std::vector RTMPose::preprocess(cv::Mat& image) +{ + // resize image + std::tuple resized = resize(image, input_w, input_h); + cv::Mat resized_image = std::get<0>(resized); + offset[0] = std::get<1>(resized); + offset[1] = std::get<2>(resized); + + // BGR2RGB + cv::cvtColor(resized_image, resized_image, cv::COLOR_BGR2RGB); + + // subtract mean and divide variance + std::vector input_tensor; + for (int k = 0; k < 3; k++) + { + for (int i = 0; i < resized_image.rows; i++) + { + for (int j = 0; j < resized_image.cols; j++) + { + input_tensor.emplace_back(((float)resized_image.at(i, j)[k] - mean[k]) / std[k]); + } + } + } + + return input_tensor; +} + + +/** + * @brief Network post-processing function + * @param simcc_x_result SimCC x dimension output + * @param simcc_y_result SimCC y dimension output + * @param img_w The width of input image + * @param img_h The height of input image + * @return +*/ +std::vector RTMPose::postprocess(std::vector simcc_x_result, std::vector simcc_y_result, int img_w, int img_h) +{ + std::vector pose_result; + for (int i = 0; i < num_points; ++i) + { + // find the maximum and maximum indexes in the value of each Extend_width length + auto x_biggest_iter = std::max_element(simcc_x_result.begin() + i * extend_width, simcc_x_result.begin() + i * extend_width + extend_width); + int max_x_pos = std::distance(simcc_x_result.begin() + i * extend_width, x_biggest_iter); + int pose_x = max_x_pos / 2; + float score_x = 
*x_biggest_iter; + + // find the maximum and maximum indexes in the value of each exten_height length + auto y_biggest_iter = std::max_element(simcc_y_result.begin() + i * extend_height, simcc_y_result.begin() + i * extend_height + extend_height); + int max_y_pos = std::distance(simcc_y_result.begin() + i * extend_height, y_biggest_iter); + int pose_y = max_y_pos / 2; + float score_y = *y_biggest_iter; + + // get point confidence + float score = MAX(score_x, score_y); + + PosePoint temp_point; + temp_point.x = (pose_x - offset[0]) * img_w / (input_w - 2 * offset[0]); + temp_point.y = (pose_y - offset[1]) * img_h / (input_h - 2 * offset[1]); + temp_point.score = score; + pose_result.emplace_back(temp_point); + } + + return pose_result; +} + + +/** + * @brief Predict function + * @param image Input image + * @return Predict results +*/ +std::vector RTMPose::predict(cv::Mat& image) +{ + // get input image size + int img_w = image.cols; + int img_h = image.rows; + std::vector input = preprocess(image); + + // apply for GPU space + cudaMalloc(&buffer[0], 3 * input_h * input_w * sizeof(float)); + cudaMalloc(&buffer[1], num_points * extend_width * sizeof(float)); + cudaMalloc(&buffer[2], num_points * extend_height * sizeof(float)); + + // copy data to GPU + cudaMemcpyAsync(buffer[0], input.data(), 3 * input_h * input_w * sizeof(float), cudaMemcpyHostToDevice, stream); + + // network inference + context->enqueueV2(buffer, stream, nullptr); + cudaStreamSynchronize(stream); + + // get result from GPU + std::vector simcc_x_result(num_points * extend_width); + std::vector simcc_y_result(num_points * extend_height); + cudaMemcpyAsync(simcc_x_result.data(), buffer[1], num_points * extend_width * sizeof(float), cudaMemcpyDeviceToHost); + cudaMemcpyAsync(simcc_y_result.data(), buffer[2], num_points * extend_height * sizeof(float), cudaMemcpyDeviceToHost); + + std::vector pose_result = postprocess(simcc_x_result, simcc_y_result, img_w, img_h); + + cudaFree(buffer[0]); + 
cudaFree(buffer[1]); + cudaFree(buffer[2]); + + return pose_result; +} diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.h b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.h new file mode 100644 index 0000000000..0b1bca4924 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/rtmpose.h @@ -0,0 +1,43 @@ +#pragma once +#include +#include +#include +#include +#include +#include +#include +#include + +#include "utils.h" + + + +class RTMPose +{ +public: + RTMPose(std::string model_path, nvinfer1::ILogger &logger); + void show(); + std::vector predict(cv::Mat& image); + ~RTMPose(); + +private: + static float input_w; + static float input_h; + static int extend_width; + static int extend_height; + static float mean[3]; + static float std[3]; + static int num_points; + + std::vector offset; + + nvinfer1::IRuntime* runtime; + nvinfer1::ICudaEngine* engine; + nvinfer1::IExecutionContext* context; + + void* buffer[3]; + cudaStream_t stream; + + std::vector preprocess(cv::Mat& image); + std::vector postprocess(std::vector simcc_x_result, std::vector simcc_y_result, int img_w, int img_h); +}; diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.cpp b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.cpp new file mode 100644 index 0000000000..053b9e5a58 --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.cpp @@ -0,0 +1,212 @@ +#include "utils.h" + + +// set points links +std::vector> coco_17_joint_links = { + {0,1},{0,2},{1,3},{2,4},{5,7},{7,9},{6,8},{8,10},{5,6}, + {5,11},{6,12},{11,12},{11,13},{13,15},{12,14},{14,16} +}; + + +/** + * @brief Mix two images + * @param srcImage Original image + * @param mixImage Past image + * @param startPoint Start point + * @return Success or not +*/ +bool MixImage(cv::Mat& srcImage, 
cv::Mat mixImage, cv::Point startPoint) +{ + + if (!srcImage.data || !mixImage.data) + { + return false; + } + + int addCols = startPoint.x + mixImage.cols > srcImage.cols ? 0 : mixImage.cols; + int addRows = startPoint.y + mixImage.rows > srcImage.rows ? 0 : mixImage.rows; + if (addCols == 0 || addRows == 0) + { + return false; + } + + cv::Mat roiImage = srcImage(cv::Rect(startPoint.x, startPoint.y, addCols, addRows)); + + mixImage.copyTo(roiImage, mixImage); + return true; +} + + +/** + * @brief Resize image + * @param img Input image + * @param w Resized width + * @param h Resized height + * @return Resized image and offset +*/ +std::tuple resize(cv::Mat& img, int w, int h) +{ + cv::Mat result; + + int ih = img.rows; + int iw = img.cols; + + float scale = MIN(float(w) / float(iw), float(h) / float(ih)); + int nw = iw * scale; + int nh = ih * scale; + + cv::resize(img, img, cv::Size(nw, nh)); + result = cv::Mat::ones(cv::Size(w, h), CV_8UC1) * 128; + cv::cvtColor(result, result, cv::COLOR_GRAY2RGB); + cv::cvtColor(img, img, cv::COLOR_BGR2RGB); + + bool ifg = MixImage(result, img, cv::Point((w - nw) / 2, (h - nh) / 2)); + if (!ifg) + { + std::cerr << "MixImage failed" << std::endl; + abort(); + } + + std::tuple res_tuple = std::make_tuple(result, (w - nw) / 2, (h - nh) / 2); + + return res_tuple; +} + + +/** + * @brief Compare two boxes + * @param b1 Box1 + * @param b2 Box2 + * @return Compare result +*/ +bool compare_boxes(const Box& b1, const Box& b2) +{ + return b1.conf < b2.conf; +} + + +/** + * @brief Iou function + * @param b1 Box1 + * @param b2 Box2 + * @return Iou +*/ +float intersection_over_union(const Box& b1, const Box& b2) +{ + float x1 = std::max(b1.x1, b2.x1); + float y1 = std::max(b1.y1, b2.y1); + float x2 = std::min(b1.x2, b2.x2); + float y2 = std::min(b1.y2, b2.y2); + + // get intersection + float box_intersection = std::max((float)0, x2 - x1) * std::max((float)0, y2 - y1); + + // get union + float area1 = (b1.x2 - b1.x1) * (b1.y2 - b1.y1); + 
float area2 = (b2.x2 - b2.x1) * (b2.y2 - b2.y1); + float box_union = area1 + area2 - box_intersection; + + // To prevent the denominator from being zero, add a very small numerical value to the denominator + float iou = box_intersection / (box_union + 0.0001); + + return iou; +} + + +/** + * @brief Non-Maximum Suppression function + * @param boxes Input boxes + * @param iou_thre Iou threshold + * @return Boxes after nms +*/ +std::vector non_maximum_suppression(std::vector boxes, float iou_thre) +{ + // Sort boxes based on confidence + std::sort(boxes.begin(), boxes.end(), compare_boxes); + + std::vector result; + std::vector temp; + while (!boxes.empty()) + { + temp.clear(); + + Box chosen_box = boxes.back(); + boxes.pop_back(); + for (int i = 0; i < boxes.size(); i++) + { + if (boxes[i].cls != chosen_box.cls || intersection_over_union(boxes[i], chosen_box) < iou_thre) + temp.push_back(boxes[i]); + } + + boxes = temp; + result.push_back(chosen_box); + } + return result; +} + + +/** + * @brief Cut image + * @param image Input image + * @param x1 The left coordinate of cut box + * @param y1 The top coordinate of cut box + * @param x2 The right coordinate of cut box + * @param y2 The bottom coordinate of cut box + * @return Cut image +*/ +cv::Mat img_cut(cv::Mat& image, int x1, int y1, int x2, int y2) +{ + cv::Rect roi(x1, y1, x2 - x1, y2 - y1); + cv::Mat croppedImage = image(roi); + return croppedImage; +} + + +/** + * @brief Judge whether two floating point numbers are equal + * @param a Number a + * @param b Number b + * @return Result +*/ +bool isEqual(float a, float b) +{ + return std::fabs(a - b) < 1e-5; +} + + +/** + * @brief Draw detection result to image + * @param image Input image + * @param points Detection result +*/ +void draw_pose(cv::Mat& image, std::vector> points) +{ + for (int p = 0; p < points.size(); p++) + { + // draw points links + for (int i = 0; i < coco_17_joint_links.size(); i++) + { + std::pair joint_link = coco_17_joint_links[i]; + 
cv::line( + image, + cv::Point(points[p][joint_link.first].x, points[p][joint_link.first].y), + cv::Point(points[p][joint_link.second].x, points[p][joint_link.second].y), + cv::Scalar{ 0, 255, 0 }, + 2, + cv::LINE_AA + ); + } + //draw points + for (int i = 0; i < points[p].size(); i++) + { + cv::circle( + image, + cv::Point(points[p][i].x, points[p][i].y), + 1, + cv::Scalar{ 0, 0, 255 }, + 5, + cv::LINE_AA + ); + } + } +} diff --git a/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.h b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.h new file mode 100644 index 0000000000..fa165c03ec --- /dev/null +++ b/projects/rtmpose/examples/RTMPose-Deploy/Windows/TensorRT/src/RTMPoseTensorRT/utils.h @@ -0,0 +1,56 @@ +#pragma once +#include +#include +#include +#include +#include + + +/** + * @brief Key point structure +*/ +struct PosePoint +{ + int x; + int y; + float score; + + PosePoint() + { + x = 0; + y = 0; + score = 0.0; + } +}; + +/** + * @brief Detection box structure +*/ +struct Box +{ + float x1; + float y1; + float x2; + float y2; + int cls; + float conf; + + Box() + { + x1 = 0; + y1 = 0; + x2 = 0; + y2 = 0; + cls = 0; + conf = 0; + } +}; + +bool MixImage(cv::Mat& srcImage, cv::Mat mixImage, cv::Point startPoint); +std::tuple resize(cv::Mat& img, int w, int h); +bool compare_boxes(const Box& b1, const Box& b2); +float intersection_over_union(const Box& b1, const Box& b2); +std::vector non_maximum_suppression(std::vector boxes, float iou_thre); +cv::Mat img_cut(cv::Mat& image, int x1, int y1, int x2, int y2); +bool isEqual(float a, float b); +void draw_pose(cv::Mat& image, std::vector> points); From a910fd4c5684b0480f561efd703635d817944568 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 29 May 2023 14:30:09 +0800 Subject: [PATCH 09/52] [Feature] Update RTMPose Face models (#2405) --- .../rtmpose-m_8xb64-210e_ap10k-256x256.py | 3 +- .../rtmpose-l_8xb256-210e_body8-256x192.py | 2 +- 
.../rtmpose-l_8xb256-210e_body8-384x288.py | 2 +- .../rtmpose-m_8xb256-210e_body8-256x192.py | 2 +- .../rtmpose-m_8xb256-210e_body8-384x288.py | 2 +- .../rtmpose-s_8xb256-210e_body8-256x192.py | 2 +- .../rtmpose-t_8xb256-210e_body8-256x192.py | 2 +- .../rtmpose-l_8xb256-420e_aic-coco-256x192.py | 2 +- .../rtmpose-l_8xb256-420e_aic-coco-384x288.py | 2 +- .../rtmpose-l_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-m_8xb256-420e_aic-coco-256x192.py | 2 +- .../rtmpose-m_8xb256-420e_aic-coco-384x288.py | 2 +- .../rtmpose-m_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-s_8xb256-420e_aic-coco-256x192.py | 2 +- .../rtmpose-s_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-t_8xb256-420e_aic-coco-256x192.py | 2 +- .../rtmpose-t_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-m_8xb64-210e_crowdpose-256x192.py | 3 +- .../mpii/rtmpose-m_8xb64-210e_mpii-256x256.py | 2 +- ...netv2_wo-deconv-8xb64-210e_coco-256x192.py | 2 +- .../simcc_res50_8xb32-140e_coco-384x288.py | 2 +- .../simcc_res50_8xb64-210e_coco-256x192.py | 2 +- ...mcc_vipnas-mbv3_8xb64-210e_coco-256x192.py | 2 +- ...res50_wo-deconv-8xb64-210e_mpii-256x256.py | 2 +- ...pnext-m_udp_8xb64-210e_crowpose-256x192.py | 1 - ...m_8xb32-60e_coco-wholebody-face-256x256.py | 3 +- .../rtmpose-m_8xb256-120e_face6-256x256.py | 690 +++++++++++++++++ .../rtmpose-s_8xb256-120e_face6-256x256.py | 691 ++++++++++++++++++ .../rtmpose-t_8xb256-120e_face6-256x256.py | 689 +++++++++++++++++ .../rtmpose/face6/rtmpose_face6.md | 71 ++ .../rtmpose/face6/rtmpose_face6.yml | 50 ++ .../lapa/rtmpose-m_8xb64-120e_lapa-256x256.py | 3 +- .../wflw/rtmpose-m_8xb64-60e_wflw-256x256.py | 3 +- ..._8xb32-210e_coco-wholebody-hand-256x256.py | 3 +- .../rtmpose-m_8xb256-210e_hand5-256x256.py | 3 +- ...ose-l_8xb32-270e_coco-wholebody-384x288.py | 3 +- ...ose-l_8xb64-270e_coco-wholebody-256x192.py | 3 +- ...ose-m_8xb64-270e_coco-wholebody-256x192.py | 3 +- ...l_udp_8xb64-210e_coco-wholebody-256x192.py | 1 - ...m_udp_8xb64-210e_coco-wholebody-256x192.py | 1 - 
projects/rtmpose/README.md | 30 +- projects/rtmpose/README_CN.md | 28 +- .../rtmpose-m_8xb64-210e_ap10k-256x256.py | 3 +- .../rtmpose-l_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-l_8xb256-420e_coco-384x288.py | 2 +- .../rtmpose-m_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-m_8xb256-420e_coco-384x288.py | 2 +- .../rtmpose-s_8xb256-420e_coco-256x192.py | 2 +- .../rtmpose-t_8xb256-420e_coco-256x192.py | 2 +- ... => rtmpose-m_8xb256-120e_lapa-256x256.py} | 35 +- .../rtmpose-s_8xb256-120e_lapa-256x256.py | 240 ++++++ .../rtmpose-t_8xb256-120e_lapa-256x256.py | 240 ++++++ ..._8xb32-210e_coco-wholebody-hand-256x256.py | 3 +- ...ose-l_8xb32-270e_coco-wholebody-384x288.py | 3 +- ...ose-l_8xb64-270e_coco-wholebody-256x192.py | 3 +- ...ose-m_8xb64-270e_coco-wholebody-256x192.py | 3 +- 56 files changed, 2770 insertions(+), 98 deletions(-) create mode 100644 configs/face_2d_keypoint/rtmpose/face6/rtmpose-m_8xb256-120e_face6-256x256.py create mode 100644 configs/face_2d_keypoint/rtmpose/face6/rtmpose-s_8xb256-120e_face6-256x256.py create mode 100644 configs/face_2d_keypoint/rtmpose/face6/rtmpose-t_8xb256-120e_face6-256x256.py create mode 100644 configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.md create mode 100644 configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.yml rename projects/rtmpose/rtmpose/face_2d_keypoint/{rtmpose-m_8xb64-120e_lapa-256x256.py => rtmpose-m_8xb256-120e_lapa-256x256.py} (89%) create mode 100644 projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py create mode 100644 projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py diff --git a/configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose-m_8xb64-210e_ap10k-256x256.py b/configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose-m_8xb64-210e_ap10k-256x256.py index 46dbfbef9f..0e8c007b31 100644 --- a/configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose-m_8xb64-210e_ap10k-256x256.py +++ b/configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose-m_8xb64-210e_ap10k-256x256.py 
@@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py index dec6a3615b..3ccd98b514 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py index b5c83dd50d..3a9fa00d5c 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py index 39621ceb70..28424e4247 100644 --- 
a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py index 42a9355c6a..39da665365 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py index 3e82f07cf6..16d86c5fa6 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py @@ -76,7 +76,7 @@ in_channels=512, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py index fd705251db..8ec873e246 100644 --- 
a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py @@ -76,7 +76,7 @@ in_channels=384, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-256x192.py index 5af4dd04a2..662bd72924 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-256x192.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-384x288.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-384x288.py index 015c46b70b..7b5895962b 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_aic-coco-384x288.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py index 2459b3417e..7d77b88fde 100644 --- 
a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-256x192.py index 151197d1f1..c7840f6c46 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-256x192.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-384x288.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-384x288.py index 6507ba247b..1293a1ae1c 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_aic-coco-384x288.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py index 7438820418..f21d0e18c6 100644 --- 
a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_aic-coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_aic-coco-256x192.py index 29c82d78ed..6c9e9fdc55 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_aic-coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_aic-coco-256x192.py @@ -76,7 +76,7 @@ in_channels=512, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py index 1a2f57c95c..c0abcbb1dd 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=512, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_aic-coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_aic-coco-256x192.py index dd375b06bf..215a297944 100644 --- 
a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_aic-coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_aic-coco-256x192.py @@ -76,7 +76,7 @@ in_channels=384, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py index 4180d6c7b1..cbe0978b2b 100644 --- a/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=384, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose-m_8xb64-210e_crowdpose-256x192.py b/configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose-m_8xb64-210e_crowdpose-256x192.py index 3660fb43b7..e93a2f1099 100644 --- a/configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose-m_8xb64-210e_crowdpose-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose-m_8xb64-210e_crowdpose-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=14, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/rtmpose/mpii/rtmpose-m_8xb64-210e_mpii-256x256.py 
b/configs/body_2d_keypoint/rtmpose/mpii/rtmpose-m_8xb64-210e_mpii-256x256.py index d3ee02f215..ca67020f51 100644 --- a/configs/body_2d_keypoint/rtmpose/mpii/rtmpose-m_8xb64-210e_mpii-256x256.py +++ b/configs/body_2d_keypoint/rtmpose/mpii/rtmpose-m_8xb64-210e_mpii-256x256.py @@ -75,7 +75,7 @@ in_channels=768, out_channels=16, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py index 65101ada88..800803d190 100644 --- a/configs/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py @@ -51,7 +51,7 @@ in_channels=1280, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], deconv_out_channels=None, loss=dict(type='KLDiscretLoss', use_target_weight=True), diff --git a/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb32-140e_coco-384x288.py b/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb32-140e_coco-384x288.py index 8ed9586bfb..c04358299f 100644 --- a/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb32-140e_coco-384x288.py +++ b/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb32-140e_coco-384x288.py @@ -48,7 +48,7 @@ in_channels=2048, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], loss=dict(type='KLDiscretLoss', use_target_weight=True), decoder=codec), diff --git 
a/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb64-210e_coco-256x192.py index 1e1fe440d1..33232a4463 100644 --- a/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/simcc/coco/simcc_res50_8xb64-210e_coco-256x192.py @@ -42,7 +42,7 @@ in_channels=2048, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], loss=dict(type='KLDiscretLoss', use_target_weight=True), decoder=codec), diff --git a/configs/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py index ea61b0fb4f..ba8ba040cb 100644 --- a/configs/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py @@ -44,7 +44,7 @@ in_channels=160, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], deconv_type='vipnas', deconv_out_channels=(160, 160, 160), diff --git a/configs/body_2d_keypoint/simcc/mpii/simcc_res50_wo-deconv-8xb64-210e_mpii-256x256.py b/configs/body_2d_keypoint/simcc/mpii/simcc_res50_wo-deconv-8xb64-210e_mpii-256x256.py index 965fda71e6..ef8b47959e 100644 --- a/configs/body_2d_keypoint/simcc/mpii/simcc_res50_wo-deconv-8xb64-210e_mpii-256x256.py +++ b/configs/body_2d_keypoint/simcc/mpii/simcc_res50_wo-deconv-8xb64-210e_mpii-256x256.py @@ -48,7 +48,7 @@ in_channels=2048, out_channels=16, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], deconv_out_channels=None, 
loss=dict(type='KLDiscretLoss', use_target_weight=True), diff --git a/configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext-m_udp_8xb64-210e_crowpose-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext-m_udp_8xb64-210e_crowpose-256x192.py index b1ba19a130..b083719303 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext-m_udp_8xb64-210e_crowpose-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext-m_udp_8xb64-210e_crowpose-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, diff --git a/configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py b/configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py index 41c9309707..958a361c07 100644 --- a/configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py +++ b/configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=68, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/face_2d_keypoint/rtmpose/face6/rtmpose-m_8xb256-120e_face6-256x256.py b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-m_8xb256-120e_face6-256x256.py new file mode 100644 index 0000000000..abbb2ce985 --- /dev/null +++ b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-m_8xb256-120e_face6-256x256.py @@ -0,0 +1,690 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# lapa coco wflw 300w cofw halpe + +# runtime 
+max_epochs = 120 +stage2_num_epochs = 10 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=1) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.005, + begin=30, + end=max_epochs, + T_max=max_epochs - 30, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(256, 256), + sigma=(5.66, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=768, + out_channels=106, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + 
act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'LapaDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.2), + dict(type='MedianBlur', p=0.2), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + 
type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# train dataset +dataset_lapa = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_trainval.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +kpt_68_to_106 = [ + # + (0, 0), + (1, 2), + (2, 4), + (3, 6), + (4, 8), + (5, 10), + (6, 12), + (7, 14), + (8, 16), + (9, 18), + (10, 20), + (11, 22), + (12, 24), + (13, 26), + (14, 28), + (15, 30), + (16, 32), + # + (17, 33), + (18, 34), + (19, 35), + (20, 36), + (21, 37), + # + (22, 42), + (23, 43), + (24, 44), + (25, 45), + (26, 46), + # + (27, 51), + (28, 52), + (29, 53), + (30, 54), + # + (31, 58), + (32, 59), + (33, 60), + (34, 61), + (35, 62), + # + (36, 66), + (39, 70), + # + ((37, 38), 68), + ((40, 41), 72), + # + (42, 75), + (45, 79), + # + ((43, 44), 77), + ((46, 47), 81), + # + (48, 84), + (49, 85), + (50, 86), + (51, 87), + (52, 88), + (53, 89), + (54, 90), + (55, 91), + (56, 92), + (57, 93), + (58, 94), + (59, 95), + (60, 96), + (61, 97), + (62, 98), + (63, 99), + (64, 100), + (65, 101), + (66, 102), + (67, 103) +] + +mapping_halpe = [ + # + (26, 0), + (27, 2), + (28, 4), + (29, 6), + (30, 8), + (31, 10), + (32, 12), + (33, 14), + (34, 16), + (35, 18), + (36, 20), + (37, 22), + (38, 24), + (39, 26), + (40, 28), + (41, 30), + (42, 32), + # + (43, 33), + (44, 34), + (45, 35), + (46, 36), + (47, 37), + # + (48, 42), + (49, 43), + (50, 44), + (51, 45), + (52, 46), + # + (53, 51), + (54, 52), + (55, 53), + (56, 54), + # + (57, 58), + (58, 59), + (59, 60), + (60, 61), + (61, 62), + # + (62, 66), + (65, 70), + # + ((63, 64), 68), + ((66, 67), 72), + # + (68, 75), + (71, 79), + # + ((69, 70), 77), + ((72, 73), 81), + # + (74, 84), + (75, 85), + (76, 86), + (77, 87), + (78, 88), + (79, 89), + (80, 90), + (81, 91), + (82, 92), + (83, 93), + (84, 94), + (85, 95), + (86, 96), + (87, 97), + (88, 98), + (89, 99), + (90, 100), + (91, 101), + 
(92, 102), + (93, 103) +] + +mapping_wflw = [ + # + (0, 0), + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), + (17, 17), + (18, 18), + (19, 19), + (20, 20), + (21, 21), + (22, 22), + (23, 23), + (24, 24), + (25, 25), + (26, 26), + (27, 27), + (28, 28), + (29, 29), + (30, 30), + (31, 31), + (32, 32), + # + (33, 33), + (34, 34), + (35, 35), + (36, 36), + (37, 37), + (38, 38), + (39, 39), + (40, 40), + (41, 41), + # + (42, 42), + (43, 43), + (44, 44), + (45, 45), + (46, 46), + (47, 47), + (48, 48), + (49, 49), + (50, 50), + # + (51, 51), + (52, 52), + (53, 53), + (54, 54), + # + (55, 58), + (56, 59), + (57, 60), + (58, 61), + (59, 62), + # + (60, 66), + (61, 67), + (62, 68), + (63, 69), + (64, 70), + (65, 71), + (66, 72), + (67, 73), + # + (68, 75), + (69, 76), + (70, 77), + (71, 78), + (72, 79), + (73, 80), + (74, 81), + (75, 82), + # + (76, 84), + (77, 85), + (78, 86), + (79, 87), + (80, 88), + (81, 89), + (82, 90), + (83, 91), + (84, 92), + (85, 93), + (86, 94), + (87, 95), + (88, 96), + (89, 97), + (90, 98), + (91, 99), + (92, 100), + (93, 101), + (94, 102), + (95, 103), + # + (96, 104), + # + (97, 105) +] + +mapping_cofw = [ + # + (0, 33), + (2, 38), + (4, 35), + (5, 40), + # + (1, 46), + (3, 50), + (6, 44), + (7, 48), + # + (8, 60), + (10, 64), + (12, 62), + (13, 66), + # + (9, 72), + (11, 68), + (14, 70), + (15, 74), + # + (18, 57), + (19, 63), + (20, 54), + (21, 60), + # + (22, 84), + (23, 90), + (24, 87), + (25, 98), + (26, 102), + (27, 93), + # + (28, 16) +] +dataset_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_wflw = dict( + type='WFLWDataset', + 
data_root=data_root, + data_mode=data_mode, + ann_file='wflw/annotations/face_landmarks_wflw_train.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +dataset_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_train.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_train.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_133kpt.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[ + dataset_lapa, dataset_coco, dataset_wflw, dataset_300w, + dataset_cofw, dataset_halpe + ], + pipeline=train_pipeline, + test_mode=False, + )) +val_dataloader = dict( + batch_size=32, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) + +# test dataset +val_lapa = 
dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +val_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_wflw = dict( + type='WFLWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='wflw/annotations/face_landmarks_wflw_test.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +val_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_test.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_test.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +test_dataloader = dict( + batch_size=32, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[val_lapa, val_coco, val_wflw, val_300w, val_cofw, val_halpe], + 
pipeline=val_pipeline, + test_mode=True, + )) + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='NME', rule='less', max_keep_ckpts=1, interval=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/configs/face_2d_keypoint/rtmpose/face6/rtmpose-s_8xb256-120e_face6-256x256.py b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-s_8xb256-120e_face6-256x256.py new file mode 100644 index 0000000000..62fa305115 --- /dev/null +++ b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-s_8xb256-120e_face6-256x256.py @@ -0,0 +1,691 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# lapa coco wflw 300w cofw halpe + +# runtime +max_epochs = 120 +stage2_num_epochs = 10 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=1) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.005, + begin=30, + end=max_epochs, + T_max=max_epochs - 30, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(256, 256), + sigma=(5.66, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + 
type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth') + ), + head=dict( + type='RTMCCHead', + in_channels=512, + out_channels=106, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'LapaDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.2), + dict(type='MedianBlur', p=0.2), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + 
encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +# train dataset +dataset_lapa = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_trainval.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +kpt_68_to_106 = [ + # + (0, 0), + (1, 2), + (2, 4), + (3, 6), + (4, 8), + (5, 10), + (6, 12), + (7, 14), + (8, 16), + (9, 18), + (10, 20), + (11, 22), + (12, 24), + (13, 26), + (14, 28), + (15, 30), + (16, 32), + # + (17, 33), + (18, 34), + (19, 35), + (20, 36), + (21, 37), + # + (22, 42), + (23, 43), + (24, 44), + (25, 45), + (26, 46), + # + (27, 51), + (28, 52), + (29, 53), + (30, 54), + # + (31, 58), + (32, 59), + (33, 60), + (34, 61), + (35, 62), + # + (36, 66), + (39, 70), + # + ((37, 38), 68), + ((40, 41), 72), + # + (42, 75), + (45, 79), + # + ((43, 44), 77), + ((46, 47), 81), + # + (48, 84), + (49, 85), + (50, 86), + (51, 87), + (52, 88), + (53, 89), + (54, 90), + (55, 91), + (56, 92), + (57, 
93), + (58, 94), + (59, 95), + (60, 96), + (61, 97), + (62, 98), + (63, 99), + (64, 100), + (65, 101), + (66, 102), + (67, 103) +] + +mapping_halpe = [ + # + (26, 0), + (27, 2), + (28, 4), + (29, 6), + (30, 8), + (31, 10), + (32, 12), + (33, 14), + (34, 16), + (35, 18), + (36, 20), + (37, 22), + (38, 24), + (39, 26), + (40, 28), + (41, 30), + (42, 32), + # + (43, 33), + (44, 34), + (45, 35), + (46, 36), + (47, 37), + # + (48, 42), + (49, 43), + (50, 44), + (51, 45), + (52, 46), + # + (53, 51), + (54, 52), + (55, 53), + (56, 54), + # + (57, 58), + (58, 59), + (59, 60), + (60, 61), + (61, 62), + # + (62, 66), + (65, 70), + # + ((63, 64), 68), + ((66, 67), 72), + # + (68, 75), + (71, 79), + # + ((69, 70), 77), + ((72, 73), 81), + # + (74, 84), + (75, 85), + (76, 86), + (77, 87), + (78, 88), + (79, 89), + (80, 90), + (81, 91), + (82, 92), + (83, 93), + (84, 94), + (85, 95), + (86, 96), + (87, 97), + (88, 98), + (89, 99), + (90, 100), + (91, 101), + (92, 102), + (93, 103) +] + +mapping_wflw = [ + # + (0, 0), + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), + (17, 17), + (18, 18), + (19, 19), + (20, 20), + (21, 21), + (22, 22), + (23, 23), + (24, 24), + (25, 25), + (26, 26), + (27, 27), + (28, 28), + (29, 29), + (30, 30), + (31, 31), + (32, 32), + # + (33, 33), + (34, 34), + (35, 35), + (36, 36), + (37, 37), + (38, 38), + (39, 39), + (40, 40), + (41, 41), + # + (42, 42), + (43, 43), + (44, 44), + (45, 45), + (46, 46), + (47, 47), + (48, 48), + (49, 49), + (50, 50), + # + (51, 51), + (52, 52), + (53, 53), + (54, 54), + # + (55, 58), + (56, 59), + (57, 60), + (58, 61), + (59, 62), + # + (60, 66), + (61, 67), + (62, 68), + (63, 69), + (64, 70), + (65, 71), + (66, 72), + (67, 73), + # + (68, 75), + (69, 76), + (70, 77), + (71, 78), + (72, 79), + (73, 80), + (74, 81), + (75, 82), + # + (76, 84), + (77, 85), + (78, 86), + (79, 87), + (80, 88), + (81, 
89), + (82, 90), + (83, 91), + (84, 92), + (85, 93), + (86, 94), + (87, 95), + (88, 96), + (89, 97), + (90, 98), + (91, 99), + (92, 100), + (93, 101), + (94, 102), + (95, 103), + # + (96, 104), + # + (97, 105) +] + +mapping_cofw = [ + # + (0, 33), + (2, 38), + (4, 35), + (5, 40), + # + (1, 46), + (3, 50), + (6, 44), + (7, 48), + # + (8, 60), + (10, 64), + (12, 62), + (13, 66), + # + (9, 72), + (11, 68), + (14, 70), + (15, 74), + # + (18, 57), + (19, 63), + (20, 54), + (21, 60), + # + (22, 84), + (23, 90), + (24, 87), + (25, 98), + (26, 102), + (27, 93), + # + (28, 16) +] +dataset_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_wflw = dict( + type='WFLWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='wflw/annotations/face_landmarks_wflw_train.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +dataset_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_train.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_train.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_133kpt.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015/'), 
+ pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[ + dataset_lapa, dataset_coco, dataset_wflw, dataset_300w, + dataset_cofw, dataset_halpe + ], + pipeline=train_pipeline, + test_mode=False, + )) +val_dataloader = dict( + batch_size=32, + num_workers=10, + pin_memory=True, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) + +# test dataset +val_lapa = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +val_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_wflw = dict( + type='WFLWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='wflw/annotations/face_landmarks_wflw_test.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +val_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_test.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + 
type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_test.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +test_dataloader = dict( + batch_size=32, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[val_lapa, val_coco, val_wflw, val_300w, val_cofw, val_halpe], + pipeline=val_pipeline, + test_mode=True, + )) + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='NME', rule='less', max_keep_ckpts=1, interval=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/configs/face_2d_keypoint/rtmpose/face6/rtmpose-t_8xb256-120e_face6-256x256.py b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-t_8xb256-120e_face6-256x256.py new file mode 100644 index 0000000000..751bedffe7 --- /dev/null +++ b/configs/face_2d_keypoint/rtmpose/face6/rtmpose-t_8xb256-120e_face6-256x256.py @@ -0,0 +1,689 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# lapa coco wflw 300w cofw halpe + +# runtime +max_epochs = 
120 +stage2_num_epochs = 10 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=1) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.005, + begin=30, + end=max_epochs, + T_max=90, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(256, 256), + sigma=(5.66, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.167, + widen_factor=0.375, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=384, + out_channels=106, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + 
pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'LapaDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.2), + dict(type='MedianBlur', p=0.2), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + 
use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +# train dataset +dataset_lapa = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_trainval.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +kpt_68_to_106 = [ + # + (0, 0), + (1, 2), + (2, 4), + (3, 6), + (4, 8), + (5, 10), + (6, 12), + (7, 14), + (8, 16), + (9, 18), + (10, 20), + (11, 22), + (12, 24), + (13, 26), + (14, 28), + (15, 30), + (16, 32), + # + (17, 33), + (18, 34), + (19, 35), + (20, 36), + (21, 37), + # + (22, 42), + (23, 43), + (24, 44), + (25, 45), + (26, 46), + # + (27, 51), + (28, 52), + (29, 53), + (30, 54), + # + (31, 58), + (32, 59), + (33, 60), + (34, 61), + (35, 62), + # + (36, 66), + (39, 70), + # + ((37, 38), 68), + ((40, 41), 72), + # + (42, 75), + (45, 79), + # + ((43, 44), 77), + ((46, 47), 81), + # + (48, 84), + (49, 85), + (50, 86), + (51, 87), + (52, 88), + (53, 89), + (54, 90), + (55, 91), + (56, 92), + (57, 93), + (58, 94), + (59, 95), + (60, 96), + (61, 97), + (62, 98), + (63, 99), + (64, 100), + (65, 101), + (66, 102), + (67, 103) +] + +mapping_halpe = [ + # + (26, 0), + (27, 2), + (28, 4), + (29, 6), + (30, 8), + (31, 10), + (32, 12), + (33, 14), + (34, 16), + (35, 18), + (36, 20), + (37, 22), + (38, 24), + (39, 26), + (40, 28), + (41, 30), + (42, 32), + # + (43, 33), + (44, 34), + (45, 35), + (46, 36), + (47, 37), + # + (48, 42), + (49, 43), + (50, 44), + (51, 45), + (52, 46), + # + (53, 51), + (54, 52), + (55, 53), + (56, 54), + # + (57, 58), + (58, 59), + (59, 60), + (60, 61), + (61, 62), + # + (62, 66), + (65, 70), + # + ((63, 64), 68), + ((66, 67), 72), + # + (68, 75), + (71, 79), + # + ((69, 70), 77), + ((72, 73), 81), + # + (74, 84), + (75, 85), + (76, 86), + (77, 87), + (78, 88), + (79, 89), + (80, 90), + (81, 91), + (82, 92), + (83, 93), + (84, 94), + (85, 95), + (86, 96), + (87, 97), + (88, 98), + (89, 99), + (90, 100), + (91, 101), + (92, 102), + (93, 103) +] + +mapping_wflw = [ 
+ # + (0, 0), + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), + (17, 17), + (18, 18), + (19, 19), + (20, 20), + (21, 21), + (22, 22), + (23, 23), + (24, 24), + (25, 25), + (26, 26), + (27, 27), + (28, 28), + (29, 29), + (30, 30), + (31, 31), + (32, 32), + # + (33, 33), + (34, 34), + (35, 35), + (36, 36), + (37, 37), + (38, 38), + (39, 39), + (40, 40), + (41, 41), + # + (42, 42), + (43, 43), + (44, 44), + (45, 45), + (46, 46), + (47, 47), + (48, 48), + (49, 49), + (50, 50), + # + (51, 51), + (52, 52), + (53, 53), + (54, 54), + # + (55, 58), + (56, 59), + (57, 60), + (58, 61), + (59, 62), + # + (60, 66), + (61, 67), + (62, 68), + (63, 69), + (64, 70), + (65, 71), + (66, 72), + (67, 73), + # + (68, 75), + (69, 76), + (70, 77), + (71, 78), + (72, 79), + (73, 80), + (74, 81), + (75, 82), + # + (76, 84), + (77, 85), + (78, 86), + (79, 87), + (80, 88), + (81, 89), + (82, 90), + (83, 91), + (84, 92), + (85, 93), + (86, 94), + (87, 95), + (88, 96), + (89, 97), + (90, 98), + (91, 99), + (92, 100), + (93, 101), + (94, 102), + (95, 103), + # + (96, 104), + # + (97, 105) +] + +mapping_cofw = [ + # + (0, 33), + (2, 38), + (4, 35), + (5, 40), + # + (1, 46), + (3, 50), + (6, 44), + (7, 48), + # + (8, 60), + (10, 64), + (12, 62), + (13, 66), + # + (9, 72), + (11, 68), + (14, 70), + (15, 74), + # + (18, 57), + (19, 63), + (20, 54), + (21, 60), + # + (22, 84), + (23, 90), + (24, 87), + (25, 98), + (26, 102), + (27, 93), + # + (28, 16) +] +dataset_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_wflw = dict( + type='WFLWDataset', + data_root=data_root, + data_mode=data_mode, + 
ann_file='wflw/annotations/face_landmarks_wflw_train.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +dataset_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_train.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +dataset_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_train.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_133kpt.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[ + dataset_lapa, dataset_coco, dataset_wflw, dataset_300w, + dataset_cofw, dataset_halpe + ], + pipeline=train_pipeline, + test_mode=False, + )) +val_dataloader = dict( + batch_size=32, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) + +# test dataset +val_lapa = dict( + type=dataset_type, + 
data_root=data_root, + data_mode=data_mode, + ann_file='LaPa/annotations/lapa_test.json', + data_prefix=dict(img='pose/LaPa/'), + pipeline=[], +) + +val_coco = dict( + type='CocoWholeBodyFaceDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_wflw = dict( + type='WFLWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='wflw/annotations/face_landmarks_wflw_test.json', + data_prefix=dict(img='pose/WFLW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_wflw) + ], +) + +val_300w = dict( + type='Face300WDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='300w/annotations/face_landmarks_300w_test.json', + data_prefix=dict(img='pose/300w/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=kpt_68_to_106) + ], +) + +val_cofw = dict( + type='COFWDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='cofw/annotations/cofw_test.json', + data_prefix=dict(img='pose/COFW/images/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_cofw) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', num_keypoints=106, mapping=mapping_halpe) + ], +) + +test_dataloader = dict( + batch_size=32, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/lapa.py'), + datasets=[val_lapa, val_coco, val_wflw, val_300w, val_cofw, val_halpe], + pipeline=val_pipeline, + 
test_mode=True, + )) + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='NME', rule='less', max_keep_ckpts=1, interval=1)) + +custom_hooks = [ + # dict( + # type='EMAHook', + # ema_type='ExpMomentumEMA', + # momentum=0.0002, + # update_buffers=True, + # priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.md b/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.md new file mode 100644 index 0000000000..254633e42c --- /dev/null +++ b/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.md @@ -0,0 +1,71 @@ + + +
+RTMPose (arXiv'2023) + +```bibtex +@misc{https://doi.org/10.48550/arxiv.2303.07399, + doi = {10.48550/ARXIV.2303.07399}, + url = {https://arxiv.org/abs/2303.07399}, + author = {Jiang, Tao and Lu, Peng and Zhang, Li and Ma, Ningsheng and Han, Rui and Lyu, Chengqi and Li, Yining and Chen, Kai}, + keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, + title = {RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose}, + publisher = {arXiv}, + year = {2023}, + copyright = {Creative Commons Attribution 4.0 International} +} + +``` + +
+ + + +
+RTMDet (arXiv'2022) + +```bibtex +@misc{lyu2022rtmdet, + title={RTMDet: An Empirical Study of Designing Real-Time Object Detectors}, + author={Chengqi Lyu and Wenwei Zhang and Haian Huang and Yue Zhou and Yudong Wang and Yanyi Liu and Shilong Zhang and Kai Chen}, + year={2022}, + eprint={2212.07784}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +
+ + + +
+COCO (ECCV'2014) + +```bibtex +@inproceedings{lin2014microsoft, + title={Microsoft coco: Common objects in context}, + author={Lin, Tsung-Yi and Maire, Michael and Belongie, Serge and Hays, James and Perona, Pietro and Ramanan, Deva and Doll{\'a}r, Piotr and Zitnick, C Lawrence}, + booktitle={European conference on computer vision}, + pages={740--755}, + year={2014}, + organization={Springer} +} +``` + +
+ +- Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 dataset. +- `Face6` and `*` denote model trained on 6 public datasets: + - [COCO-Wholebody-Face](https://github.com/jin-s13/COCO-WholeBody/) + - [WFLW](https://wywu.github.io/projects/LAB/WFLW.html) + - [300W](https://ibug.doc.ic.ac.uk/resources/300-W/) + - [COFW](http://www.vision.caltech.edu/xpburgos/ICCV13/) + - [Halpe](https://github.com/Fang-Haoshu/Halpe-FullBody/) + - [LaPa](https://github.com/JDAI-CV/lapa-dataset) + +| Config | Input Size | NME
(LaPa) | FLOPS
(G) | Download | +| :--------------------------------------------------------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------------------------------------------: | +| [RTMPose-t\*](./rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py) | 256x256 | 1.67 | 0.652 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth) | +| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | +| [RTMPose-m\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.44 | 2.852 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth) | diff --git a/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.yml b/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.yml new file mode 100644 index 0000000000..2cd822a337 --- /dev/null +++ b/configs/face_2d_keypoint/rtmpose/face6/rtmpose_face6.yml @@ -0,0 +1,50 @@ +Collections: +- Name: RTMPose + Paper: + Title: "RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose" + URL: https://arxiv.org/abs/2303.07399 + README: https://github.com/open-mmlab/mmpose/blob/main/projects/rtmpose/README.md +Models: +- Config: configs/face_2d_keypoint/rtmpose/face6/rtmpose-t_8xb256-120e_face6-256x256.py + In Collection: RTMPose + Metadata: + Architecture: &id001 + - RTMPose + Training Data: &id002 + - COCO-Wholebody-Face + - WFLW + - 300W + - COFW + - Halpe + - LaPa + Name: rtmpose-t_8xb256-120e_face6-256x256 + Results: + - Dataset: Face6 + Metrics: + NME: 1.67 + Task: Face 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth +- Config: configs/face_2d_keypoint/rtmpose/face6/rtmpose-s_8xb256-120e_face6-256x256.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-s_8xb256-120e_face6-256x256 + Results: + - Dataset: Face6 + Metrics: + NME: 1.59 + Task: Face 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth +- Config: configs/face_2d_keypoint/rtmpose/face6/rtmpose-m_8xb256-120e_face6-256x256.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-m_8xb256-120e_face6-256x256 + Results: + - Dataset: Face6 + Metrics: + NME: 1.44 + Task: Face 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth diff --git a/configs/face_2d_keypoint/rtmpose/lapa/rtmpose-m_8xb64-120e_lapa-256x256.py b/configs/face_2d_keypoint/rtmpose/lapa/rtmpose-m_8xb64-120e_lapa-256x256.py index b4124ff6d8..fee1201db1 100644 --- a/configs/face_2d_keypoint/rtmpose/lapa/rtmpose-m_8xb64-120e_lapa-256x256.py +++ b/configs/face_2d_keypoint/rtmpose/lapa/rtmpose-m_8xb64-120e_lapa-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=106, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/face_2d_keypoint/rtmpose/wflw/rtmpose-m_8xb64-60e_wflw-256x256.py b/configs/face_2d_keypoint/rtmpose/wflw/rtmpose-m_8xb64-60e_wflw-256x256.py index f3f8c06e43..cbfd788d60 100644 --- 
a/configs/face_2d_keypoint/rtmpose/wflw/rtmpose-m_8xb64-60e_wflw-256x256.py +++ b/configs/face_2d_keypoint/rtmpose/wflw/rtmpose-m_8xb64-60e_wflw-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=98, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py b/configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py index 7d5438586e..48c7193394 100644 --- a/configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py +++ b/configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=21, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/hand_2d_keypoint/rtmpose/hand5/rtmpose-m_8xb256-210e_hand5-256x256.py b/configs/hand_2d_keypoint/rtmpose/hand5/rtmpose-m_8xb256-210e_hand5-256x256.py index 689dc68096..f329f1cb1d 100644 --- a/configs/hand_2d_keypoint/rtmpose/hand5/rtmpose-m_8xb256-210e_hand5-256x256.py +++ b/configs/hand_2d_keypoint/rtmpose/hand5/rtmpose-m_8xb256-210e_hand5-256x256.py @@ -26,7 +26,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, 
begin=max_epochs // 2, @@ -78,7 +77,7 @@ in_channels=768, out_channels=21, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py index a926fe38d7..71715fe97a 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=1024, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py index 026336a550..bbacb5deba 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=1024, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), 
simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py index 6a834b8301..880bfaf774 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-l_udp_8xb64-210e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-l_udp_8xb64-210e_coco-wholebody-256x192.py index 2112e19e76..7182e7a3ed 100644 --- a/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-l_udp_8xb64-210e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-l_udp_8xb64-210e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, diff --git a/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py index bfcb5c3917..05fae649b8 100644 --- 
a/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index cc2e300e6c..ca4d423a6f 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -52,8 +52,7 @@ ______________________________________________________________________ - May. 2023: - Add [code examples](./examples/) of RTMPose. - - Release Hand models trained on 5 datasets. - - Release Body models trained on 7 datasets. + - Release Hand, Face, Body models trained on combined datasets. - Mar. 2023: RTMPose is released. RTMPose-m runs at 430+ FPS and achieves 75.8 mAP on COCO val set. ## 📖 Introduction [🔝](#-table-of-contents) @@ -63,7 +62,7 @@ ______________________________________________________________________
- +
@@ -227,13 +226,24 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Face 2d (106 Keypoints) -
- -
+
+Face6 + +- `Face6` and `*` denote model trained on 6 public datasets: + - [COCO-Wholebody-Face](https://github.com/jin-s13/COCO-WholeBody/) + - [WFLW](https://wywu.github.io/projects/LAB/WFLW.html) + - [300W](https://ibug.doc.ic.ac.uk/resources/300-W/) + - [COFW](http://www.vision.caltech.edu/xpburgos/ICCV13/) + - [Halpe](https://github.com/Fang-Haoshu/Halpe-FullBody/) + - [LaPa](https://github.com/JDAI-CV/lapa-dataset) -| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | -| :-------------------------------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :---------: | -| [RTMPose-m (alpha version)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | +| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | +| [RTMPose-t\*](./rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py) | 256x256 | 1.67 | 0.652 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth) | +| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | +| [RTMPose-m\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.44 | 2.852 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth) | + +
### Hand 2d (21 Keypoints) @@ -249,7 +259,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr - [OneHand10K](https://www.yangangwang.com/papers/WANG-MCC-2018-10.html) - [FreiHand2d](https://lmb.informatik.uni-freiburg.de/projects/freihand/) - [RHD2d](https://lmb.informatik.uni-freiburg.de/resources/datasets/RenderedHandposeDataset.en.html) - - [Halpe](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_wholebody_keypoint.html#halpe) + - [Halpe](https://github.com/Fang-Haoshu/Halpe-FullBody/) | Config | Input Size | PCK@0.2
(COCO-Wholebody-Hand) | PCK@0.2
(Hand5) | AUC
(Hand5) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | | :-------------------------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------: | :---------------------: | :-----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 6f2da2e662..34a3be9179 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -48,8 +48,7 @@ ______________________________________________________________________ - 2023 年 5 月: - 添加 [代码示例](./examples/) - - 发布混合数据集训练的 Hand 模型。 - - 发布混合数据集训练的 Body 模型。 + - 发布混合数据集训练的 Hand, Face, Body 模型。 - 2023 年 3 月:发布 RTMPose。RTMPose-m 取得 COCO 验证集 75.8 mAP,推理速度达到 430+ FPS 。 ## 📖 简介 [🔝](#-table-of-contents) @@ -59,7 +58,7 @@ ______________________________________________________________________
- +
@@ -218,13 +217,24 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 脸部 2d 关键点 (106 Keypoints) -
- -
+
+Face6 + +- `Face6` and `*` 代表模型在 6 个开源数据集上训练得到: + - [COCO-Wholebody-Face](https://github.com/jin-s13/COCO-WholeBody/) + - [WFLW](https://wywu.github.io/projects/LAB/WFLW.html) + - [300W](https://ibug.doc.ic.ac.uk/resources/300-W/) + - [COFW](http://www.vision.caltech.edu/xpburgos/ICCV13/) + - [Halpe](https://github.com/Fang-Haoshu/Halpe-FullBody/) + - [LaPa](https://github.com/JDAI-CV/lapa-dataset) -| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | -| :-------------------------------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :---------: | -| [RTMPose-m (试用)](./rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py) | 256x256 | 1.70 | - | - | - | Coming soon | +| Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | +| :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | +| [RTMPose-t\*](./rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py) | 256x256 | 1.67 | 0.652 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth) | +| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | +| [RTMPose-m\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.44 | 2.852 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth) | + +
### 手部 2d 关键点 (21 Keypoints) diff --git a/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py b/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py index 93c4a12e12..337ce8cfd9 100644 --- a/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py +++ b/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py index 4e69788cd1..ab1479dc76 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py index a3afbce51d..5f8cadc5b0 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py @@ -76,7 +76,7 @@ in_channels=1024, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + 
in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py index 25e5c40115..89b0e682f8 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py index 4d9ecb1e96..64169d0b3b 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py @@ -76,7 +76,7 @@ in_channels=768, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py index d5b84c478f..2a18f9b9b1 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=512, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in 
codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py index 49c524bcfb..cf37c86131 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py @@ -76,7 +76,7 @@ in_channels=384, out_channels=17, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py similarity index 89% rename from projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py rename to projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py index 807e0c12fa..baeca2c138 100644 --- a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb64-120e_lapa-256x256.py +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py @@ -12,6 +12,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,12 +25,11 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', - eta_min=base_lr * 0.05, - begin=max_epochs // 2, + eta_min=base_lr * 0.005, + begin=30, end=max_epochs, - T_max=max_epochs // 2, + T_max=max_epochs - 30, by_epoch=True, convert_to_iter_based=True), ] @@ -68,15 +68,15 @@ init_cfg=dict( type='Pretrained', prefix='backbone.', - 
checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' - 'rtmposev1/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth' # noqa + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth' # noqa )), head=dict( type='RTMCCHead', in_channels=768, out_channels=106, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( @@ -99,7 +99,7 @@ # base dataset settings dataset_type = 'LapaDataset' data_mode = 'topdown' -data_root = 'data/LaPa/' +data_root = 'data/' backend_args = dict(backend='local') @@ -113,7 +113,6 @@ type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), dict(type='TopdownAffine', input_size=codec['input_size']), dict(type='mmdet.YOLOXHSVRandomAug'), - dict(type='PhotometricDistortion'), dict( type='Albumentation', transforms=[ @@ -147,8 +146,8 @@ dict( type='RandomBBoxTransform', shift_factor=0., - scale_factor=[0.75, 1.25], - rotate_factor=60), + scale_factor=[0.5, 1.5], + rotate_factor=80), dict(type='TopdownAffine', input_size=codec['input_size']), dict(type='mmdet.YOLOXHSVRandomAug'), dict( @@ -172,7 +171,7 @@ # data loaders train_dataloader = dict( - batch_size=32, + batch_size=256, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -181,12 +180,12 @@ data_root=data_root, data_mode=data_mode, ann_file='annotations/lapa_trainval.json', - data_prefix=dict(img=''), + data_prefix=dict(img='LaPa/'), pipeline=train_pipeline, )) val_dataloader = dict( batch_size=32, - num_workers=10, + num_workers=4, persistent_workers=True, drop_last=False, sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), @@ -195,13 +194,13 @@ data_root=data_root, data_mode=data_mode, ann_file='annotations/lapa_test.json', - data_prefix=dict(img=''), + 
data_prefix=dict(img='LaPa/'), test_mode=True, pipeline=val_pipeline, )) test_dataloader = dict( batch_size=32, - num_workers=10, + num_workers=4, persistent_workers=True, drop_last=False, sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), @@ -210,7 +209,7 @@ data_root=data_root, data_mode=data_mode, ann_file='annotations/lapa_test.json', - data_prefix=dict(img=''), + data_prefix=dict(img='LaPa/'), test_mode=True, pipeline=val_pipeline, )) @@ -218,7 +217,7 @@ # hooks default_hooks = dict( checkpoint=dict( - save_best='NME', rule='less', max_keep_ckpts=1, interval=1)) + save_best='NME', rule='less', max_keep_ckpts=3, interval=1)) custom_hooks = [ dict( diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py new file mode 100644 index 0000000000..777a67c28e --- /dev/null +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py @@ -0,0 +1,240 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# runtime +max_epochs = 120 +stage2_num_epochs = 10 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=1) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.005, + begin=30, + end=max_epochs, + T_max=max_epochs - 30, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(256, 256), + sigma=(5.66, 5.66), + 
simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth') + ), + head=dict( + type='RTMCCHead', + in_channels=512, + out_channels=106, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'LapaDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.2), + dict(type='MedianBlur', p=0.2), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, 
+ min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_trainval.json', + data_prefix=dict(img='LaPa/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_test.json', + data_prefix=dict(img='LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + 
type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_test.json', + data_prefix=dict(img='LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='NME', rule='less', max_keep_ckpts=3, interval=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py new file mode 100644 index 0000000000..2602b642cc --- /dev/null +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py @@ -0,0 +1,240 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# runtime +max_epochs = 120 +stage2_num_epochs = 10 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=1) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.005, + begin=30, + end=max_epochs, + T_max=max_epochs - 30, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(256, 256), + 
sigma=(5.66, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.167, + widen_factor=0.375, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmdetection/v3.0/' + 'rtmdet/cspnext_rsb_pretrain/cspnext-tiny_imagenet_600e-3a2dd350.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=384, + out_channels=106, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'LapaDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.2), + dict(type='MedianBlur', p=0.2), + dict( + type='CoarseDropout', + max_holes=1, + 
max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_trainval.json', + data_prefix=dict(img='LaPa/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_test.json', + data_prefix=dict(img='LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', 
shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/lapa_test.json', + data_prefix=dict(img='LaPa/'), + test_mode=True, + pipeline=val_pipeline, + )) + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='NME', rule='less', max_keep_ckpts=3, interval=1)) + +custom_hooks = [ + # dict( + # type='EMAHook', + # ema_type='ExpMomentumEMA', + # momentum=0.0002, + # update_buffers=True, + # priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py b/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py index 1be01c7959..eb477a9426 100644 --- a/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py +++ b/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=768, out_channels=21, input_size=codec['input_size'], - in_featuremap_size=(8, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py index 06371eddba..df44b5a64f 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py +++ 
b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=1024, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(9, 12), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py index 17d58f8317..aa02d77209 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 +75,7 @@ in_channels=1024, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py index 1369c45672..309e0431b4 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py @@ -24,7 +24,6 @@ begin=0, end=1000), dict( - # use cosine lr from 150 to 300 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -76,7 
+75,7 @@ in_channels=768, out_channels=133, input_size=codec['input_size'], - in_featuremap_size=(6, 8), + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], final_layer_kernel_size=7, gau_cfg=dict( From 5109106917f0cd630d01a00943ebc317a71c9381 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 29 May 2023 15:13:50 +0800 Subject: [PATCH 10/52] [Docs] Update RTMPose logo (#2406) --- projects/README.md | 2 +- projects/rtmpose/README.md | 2 +- projects/rtmpose/README_CN.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/projects/README.md b/projects/README.md index cca7bd947e..a10ccad65a 100644 --- a/projects/README.md +++ b/projects/README.md @@ -33,7 +33,7 @@ We also provide some documentation listed below to help you get started: - **[:zap:RTMPose](./rtmpose)**: Real-Time Multi-Person Pose Estimation toolkit based on MMPose
- +

- **[:art:MMPose4AIGC](./mmpose4aigc)**: Guide AI image generation with MMPose diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index ca4d423a6f..4d6f4e6d94 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -1,5 +1,5 @@
- +
# RTMPose: Real-Time Multi-Person Pose Estimation toolkit based on MMPose diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 34a3be9179..7abafc25c4 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -1,5 +1,5 @@
- +
# RTMPose: Real-Time Multi-Person Pose Estimation toolkit based on MMPose From b647b8c9e876e5a6f8d653e829136eecf9b4c4a8 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Wed, 31 May 2023 10:50:06 +0800 Subject: [PATCH 11/52] [Enhance] specify filename for inferencer (#2416) --- .../inferencers/base_mmpose_inferencer.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index 0f966e9b0f..bf41d821a0 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -321,22 +321,31 @@ def visualize(self, if vis_out_dir: out_img = mmcv.rgb2bgr(visualization) + _, file_extension = os.path.splitext(vis_out_dir) + if file_extension: + dir_name = os.path.dirname(vis_out_dir) + file_name = os.path.basename(vis_out_dir) + else: + dir_name = vis_out_dir + file_name = None + mkdir_or_exist(dir_name) if self._video_input: if self.video_info['writer'] is None: fourcc = cv2.VideoWriter_fourcc(*'mp4v') - mkdir_or_exist(vis_out_dir) - out_file = join_path( - vis_out_dir, - os.path.basename(self.video_info['name'])) + if file_name is None: + file_name = os.path.basename( + self.video_info['name']) + out_file = join_path(dir_name, file_name) self.video_info['writer'] = cv2.VideoWriter( out_file, fourcc, self.video_info['fps'], (visualization.shape[1], visualization.shape[0])) self.video_info['writer'].write(out_img) else: - out_file = join_path(vis_out_dir, img_name) + file_name = file_name if file_name else img_name + out_file = join_path(dir_name, file_name) mmcv.imwrite(out_img, out_file) if return_vis: From 2ef98c3ff9ff319cfca47ec36b6c5c8ddc78a3aa Mon Sep 17 00:00:00 2001 From: Yifan Lareina WU Date: Thu, 1 Jun 2023 11:24:39 +0800 Subject: [PATCH 12/52] [Refactor] Refactor SimpleBaseline3d and VideoPose3d (#2420) --- .../video_pose_lift/README.md | 17 + 
...pose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py | 132 ++++ ...e3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py | 132 ++++ ...videopose3d-243frm-supv_8xb128-80e_h36m.py | 128 ++++ ...-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py | 119 ++++ ...opose3d-27frm-semi-supv_8xb64-200e_h36m.py | 117 ++++ ..._videopose3d-27frm-supv_8xb128-80e_h36m.py | 128 ++++ ..._videopose3d-81frm-supv_8xb128-80e_h36m.py | 128 ++++ .../video_pose_lift/h36m/videopose3d_h36m.yml | 102 ++++ .../video_pose_lift/h36m/videpose3d_h36m.md | 67 +++ demo/body3d_pose_lifter_demo.py | 466 +++++++++++++++ demo/docs/3d_human_pose_demo.md | 74 +++ mmpose/apis/__init__.py | 9 +- mmpose/apis/inference.py | 33 + mmpose/apis/inference_3d.py | 255 ++++++++ mmpose/apis/inference_tracking.py | 103 ++++ mmpose/codecs/__init__.py | 4 +- mmpose/codecs/image_pose_lifting.py | 203 +++++++ mmpose/codecs/regression_label.py | 2 +- mmpose/codecs/video_pose_lifting.py | 202 +++++++ mmpose/datasets/datasets/__init__.py | 1 + mmpose/datasets/datasets/base/__init__.py | 3 +- .../datasets/base/base_mocap_dataset.py | 403 +++++++++++++ mmpose/datasets/datasets/body3d/__init__.py | 4 + .../datasets/datasets/body3d/h36m_dataset.py | 259 ++++++++ mmpose/datasets/datasets/utils.py | 5 + mmpose/datasets/transforms/__init__.py | 3 +- mmpose/datasets/transforms/formatting.py | 52 +- .../datasets/transforms/pose3d_transforms.py | 105 ++++ mmpose/evaluation/functional/__init__.py | 6 +- mmpose/evaluation/functional/keypoint_eval.py | 55 ++ mmpose/evaluation/functional/mesh_eval.py | 66 ++ mmpose/evaluation/metrics/__init__.py | 3 +- .../evaluation/metrics/keypoint_3d_metrics.py | 131 ++++ mmpose/models/heads/__init__.py | 6 +- .../models/heads/regression_heads/__init__.py | 4 + .../temporal_regression_head.py | 151 +++++ .../trajectory_regression_head.py | 150 +++++ mmpose/models/pose_estimators/__init__.py | 3 +- mmpose/models/pose_estimators/base.py | 2 + mmpose/models/pose_estimators/pose_lifter.py | 340 +++++++++++ 
mmpose/structures/keypoint/__init__.py | 4 +- mmpose/structures/keypoint/transforms.py | 57 ++ mmpose/visualization/__init__.py | 4 +- mmpose/visualization/fast_visualizer.py | 78 +++ mmpose/visualization/local_visualizer_3d.py | 563 ++++++++++++++++++ projects/rtmpose/README.md | 35 ++ projects/rtmpose/README_CN.md | 35 ++ .../S1_Directions_1.54138969_000001.jpg | Bin .../S5_SittingDown.54138969_002061.jpg | Bin .../S7_Greeting.55011271_000396.jpg | Bin .../S8_WalkDog_1.55011271_000026.jpg | Bin tests/test_codecs/test_image_pose_lifting.py | 150 +++++ tests/test_codecs/test_video_pose_lifting.py | 156 +++++ .../test_body_datasets/test_h36m_dataset.py | 175 ++++++ .../test_transforms/test_pose3d_transforms.py | 150 +++++ .../test_functional/test_keypoint_eval.py | 357 ++++++----- .../test_metrics/test_keypoint_3d_metrics.py | 70 +++ .../test_fast_visualizer.py | 71 +++ 59 files changed, 5905 insertions(+), 173 deletions(-) create mode 100644 configs/body_3d_keypoint/video_pose_lift/README.md create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py create mode 100644 configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml create mode 100644 
configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md create mode 100644 demo/body3d_pose_lifter_demo.py create mode 100644 demo/docs/3d_human_pose_demo.md create mode 100644 mmpose/apis/inference_3d.py create mode 100644 mmpose/apis/inference_tracking.py create mode 100644 mmpose/codecs/image_pose_lifting.py create mode 100644 mmpose/codecs/video_pose_lifting.py create mode 100644 mmpose/datasets/datasets/base/base_mocap_dataset.py create mode 100644 mmpose/datasets/datasets/body3d/__init__.py create mode 100644 mmpose/datasets/datasets/body3d/h36m_dataset.py create mode 100644 mmpose/datasets/transforms/pose3d_transforms.py create mode 100644 mmpose/evaluation/functional/mesh_eval.py create mode 100644 mmpose/evaluation/metrics/keypoint_3d_metrics.py create mode 100644 mmpose/models/heads/regression_heads/temporal_regression_head.py create mode 100644 mmpose/models/heads/regression_heads/trajectory_regression_head.py create mode 100644 mmpose/models/pose_estimators/pose_lifter.py create mode 100644 mmpose/visualization/fast_visualizer.py create mode 100644 mmpose/visualization/local_visualizer_3d.py rename tests/data/h36m/{ => S1/S1_Directions_1.54138969}/S1_Directions_1.54138969_000001.jpg (100%) rename tests/data/h36m/{ => S5/S5_SittingDown.54138969}/S5_SittingDown.54138969_002061.jpg (100%) rename tests/data/h36m/{ => S7/S7_Greeting.55011271}/S7_Greeting.55011271_000396.jpg (100%) rename tests/data/h36m/{ => S8/S8_WalkDog_1.55011271}/S8_WalkDog_1.55011271_000026.jpg (100%) create mode 100644 tests/test_codecs/test_image_pose_lifting.py create mode 100644 tests/test_codecs/test_video_pose_lifting.py create mode 100644 tests/test_datasets/test_datasets/test_body_datasets/test_h36m_dataset.py create mode 100644 tests/test_datasets/test_transforms/test_pose3d_transforms.py create mode 100644 tests/test_evaluation/test_metrics/test_keypoint_3d_metrics.py create mode 100644 tests/test_visualization/test_fast_visualizer.py diff --git 
a/configs/body_3d_keypoint/video_pose_lift/README.md b/configs/body_3d_keypoint/video_pose_lift/README.md new file mode 100644 index 0000000000..c23b69ea7f --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/README.md @@ -0,0 +1,17 @@ +# 3D human pose estimation in video with temporal convolutions and semi-supervised training + +Based on the success of 2d human pose estimation, it directly "lifts" a sequence of 2d keypoints to 3d keypoints. + +## Results and Models + +### Human3.6m Dataset + +| Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | +| :------------------------------------------------------ | :-------------: | :---: | :-----: | :-----: | :------------------------------------------------------: | :-----------------------------------------------------: | +| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | 
[log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | +| [VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | +| [VideoPose3D-semi-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | +| [VideoPose3D-semi-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py) | 27 | 67.3 | 50.4 | 63.6 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py 
b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py new file mode 100644 index 0000000000..0cbf89142d --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py @@ -0,0 +1,132 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = dict(max_epochs=80, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict(type='Adam', lr=1e-4)) + +# learning policy +param_scheduler = [ + dict(type='ExponentialLR', gamma=0.98, end=80, by_epoch=True) +] + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=4, + kernel_sizes=(1, 1, 1, 1, 1), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict( + type='RandomFlipAroundRoot', + keypoints_flip_cfg=dict(), + target_flip_cfg=dict(), + ), + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 
'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +train_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=1, + causal=False, + pad_video_seq=False, + keypoint_2d_src='detection', + keypoint_2d_det_file='joint_2d_det_files/cpn_ft_h36m_dbb_train.npy', + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + ), +) +val_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=1, + causal=False, + pad_video_seq=False, + keypoint_2d_src='detection', + keypoint_2d_det_file='joint_2d_det_files/cpn_ft_h36m_dbb_test.npy', + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py new file mode 100644 index 0000000000..3ef3df570b --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py @@ -0,0 +1,132 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg 
= dict(max_epochs=200, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict(type='Adam', lr=1e-4)) + +# learning policy +param_scheduler = [ + dict(type='ExponentialLR', gamma=0.98, end=200, by_epoch=True) +] + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=4, + kernel_sizes=(3, 3, 3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict( + type='RandomFlipAroundRoot', + keypoints_flip_cfg=dict(), + target_flip_cfg=dict(), + ), + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +train_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=243, + causal=False, + pad_video_seq=True, + keypoint_2d_src='detection', + keypoint_2d_det_file='joint_2d_det_files/cpn_ft_h36m_dbb_train.npy', + camera_param_file='annotation_body3d/cameras.pkl', + 
data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + ), +) +val_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=243, + causal=False, + pad_video_seq=True, + keypoint_2d_src='detection', + keypoint_2d_det_file='joint_2d_det_files/cpn_ft_h36m_dbb_test.npy', + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py new file mode 100644 index 0000000000..0f311ac5cf --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py @@ -0,0 +1,128 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = dict(max_epochs=80, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict(type='Adam', lr=1e-3)) + +# learning policy +param_scheduler = [ + dict(type='ExponentialLR', gamma=0.975, end=80, by_epoch=True) +] + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, 
+ zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=4, + kernel_sizes=(3, 3, 3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict( + type='RandomFlipAroundRoot', + keypoints_flip_cfg=dict(), + target_flip_cfg=dict(), + ), + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +train_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=243, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + ), +) +val_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=243, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + 
dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py new file mode 100644 index 0000000000..08bcda8ed7 --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py @@ -0,0 +1,119 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = None + +# optimizer + +# learning policy + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + ), + traj_backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + traj_head=dict( + type='TrajectoryRegressionHead', + in_channels=1024, + num_joints=1, + loss=dict(type='MPJPELoss', use_target_weight=True), + decoder=codec, + ), + semi_loss=dict( + type='SemiSupervisionLoss', + joint_parents=[0, 0, 1, 2, 0, 4, 5, 0, 7, 8, 9, 8, 11, 12, 8, 14, 15], + 
warmup_iterations=1311376 // 64 // 8 * 5), +) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +val_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=27, + causal=False, + pad_video_seq=True, + keypoint_2d_src='detection', + keypoint_2d_det_file='joint_2d_det_files/cpn_ft_h36m_dbb_test.npy', + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe'), + dict(type='MPJPE', mode='n-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py new file mode 100644 index 0000000000..d145f05b17 --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py @@ -0,0 +1,117 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = None + +# optimizer + +# learning policy + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + 
logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + ), + traj_backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + traj_head=dict( + type='TrajectoryRegressionHead', + in_channels=1024, + num_joints=1, + loss=dict(type='MPJPELoss', use_target_weight=True), + decoder=codec, + ), + semi_loss=dict( + type='SemiSupervisionLoss', + joint_parents=[0, 0, 1, 2, 0, 4, 5, 0, 7, 8, 9, 8, 11, 12, 8, 14, 15], + warmup_iterations=1311376 // 64 // 8 * 5), +) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +val_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=27, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe'), + dict(type='MPJPE', 
mode='n-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py new file mode 100644 index 0000000000..2589b493a6 --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py @@ -0,0 +1,128 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = dict(max_epochs=80, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict(type='Adam', lr=1e-3)) + +# learning policy +param_scheduler = [ + dict(type='ExponentialLR', gamma=0.975, end=80, by_epoch=True) +] + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict( + type='RandomFlipAroundRoot', + keypoints_flip_cfg=dict(), + target_flip_cfg=dict(), + ), + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) 
+] +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +train_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=27, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + ), +) +val_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=27, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=val_pipeline, + test_mode=True, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py new file mode 100644 index 0000000000..f2c27e423d --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py @@ -0,0 +1,128 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = dict(max_epochs=80, val_interval=10) + +# optimizer +optim_wrapper = 
dict(optimizer=dict(type='Adam', lr=1e-3)) + +# learning policy +param_scheduler = [ + dict(type='ExponentialLR', gamma=0.975, end=80, by_epoch=True) +] + +auto_scale_lr = dict(base_batch_size=1024) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1), + logger=dict(type='LoggerHook', interval=20), +) + +# codec settings +codec = dict( + type='VideoPoseLifting', + num_keypoints=17, + zero_center=True, + root_index=0, + remove_root=False) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=3, + kernel_sizes=(3, 3, 3, 3), + dropout=0.25, + use_stride_conv=True, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=17, + loss=dict(type='MPJPELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict( + type='RandomFlipAroundRoot', + keypoints_flip_cfg=dict(), + target_flip_cfg=dict(), + ), + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] +val_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root')) +] + +# data loaders +train_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=81, + causal=False, + pad_video_seq=True, + camera_param_file='annotation_body3d/cameras.pkl', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + ), +) +val_dataloader = dict( + batch_size=128, + num_workers=2, + persistent_workers=True, + 
- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: &id001 + - VideoPose3D + Training Data: Human3.6M + Name: vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m
vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 38.9 + P-MPJPE: 29.2 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth +- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: *id001 + Training Data: Human3.6M + Name: vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 37.6 + P-MPJPE: 28.3 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth +- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: *id001 + Training Data: Human3.6M + Name: vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 52.9 + P-MPJPE: 41.3 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth +- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: *id001 + Training Data: Human3.6M + Name: vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 47.9 + P-MPJPE: 38.0 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth +- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: *id001 + Training Data: Human3.6M + Name: 
vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 58.1 + N-MPJPE: 54.7 + P-MPJPE: 42.8 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth +- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py + In Collection: VideoPose3D + Metadata: + Architecture: *id001 + Training Data: Human3.6M + Name: vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 67.4 + N-MPJPE: 63.2 + P-MPJPE: 50.1 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md b/configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md new file mode 100644 index 0000000000..c36ef29df9 --- /dev/null +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md @@ -0,0 +1,67 @@ + + +
+ +VideoPose3D (CVPR'2019) + +```bibtex +@inproceedings{pavllo20193d, +title={3d human pose estimation in video with temporal convolutions and semi-supervised training}, +author={Pavllo, Dario and Feichtenhofer, Christoph and Grangier, David and Auli, Michael}, +booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, +pages={7753--7762}, +year={2019} +} +``` + +
+ + + +
+Human3.6M (TPAMI'2014) + +```bibtex +@article{h36m_pami, +author = {Ionescu, Catalin and Papava, Dragos and Olaru, Vlad and Sminchisescu, Cristian}, +title = {Human3.6M: Large Scale Datasets and Predictive Methods for 3D Human Sensing in Natural Environments}, +journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, +publisher = {IEEE Computer Society}, +volume = {36}, +number = {7}, +pages = {1325-1339}, +month = {jul}, +year = {2014} +} +``` + +
+ +Testing results on Human3.6M dataset with ground truth 2D detections, supervised training + +| Arch | Receptive Field | MPJPE | P-MPJPE | ckpt | log | +| :--------------------------------------------------------- | :-------------: | :---: | :-----: | :--------------------------------------------------------: | :-------------------------------------------------------: | +| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | + +Testing results on Human3.6M dataset with CPN 2D detections1, supervised training + +| Arch | Receptive Field | MPJPE | P-MPJPE | ckpt | log | +| :--------------------------------------------------------- | :-------------: | :---: | :-----: | :--------------------------------------------------------: | :-------------------------------------------------------: | +| 
[VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | + +Testing results on Human3.6M dataset with ground truth 2D detections, semi-supervised training + +| Training Data | Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | +| :------------ | :-------------------------------------------------: | :-------------: | :---: | :-----: | :-----: | :-------------------------------------------------: | :-------------------------------------------------: | +| 10% S1 | [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | + +Testing results on Human3.6M dataset with CPN 2D detections1, semi-supervised training + +| Training Data | Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | +| :------------ | :-------------------------------------------------: | :-------------: | :---: | :-----: | :-----: | :-------------------------------------------------: | 
:-------------------------------------------------: | +| 10% S1 | [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py) | 27 | 67.3 | 50.4 | 63.6 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | + +1 CPN 2D detections are provided by [official repo](https://github.com/facebookresearch/VideoPose3D/blob/master/DATASETS.md). The reformatted version used in this repository can be downloaded from [train_detection](https://download.openmmlab.com/mmpose/body3d/videopose/cpn_ft_h36m_dbb_train.npy) and [test_detection](https://download.openmmlab.com/mmpose/body3d/videopose/cpn_ft_h36m_dbb_test.npy). diff --git a/demo/body3d_pose_lifter_demo.py b/demo/body3d_pose_lifter_demo.py new file mode 100644 index 0000000000..02e3014f21 --- /dev/null +++ b/demo/body3d_pose_lifter_demo.py @@ -0,0 +1,466 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
pose_det_dataset (str): Name of the dataset for 2D pose detector. + pose_lift_dataset (str): Name of the dataset for pose lifter model. + + Returns: + ndarray[N, K, 2 or 3]: the transformed 2D keypoints.
+ + coco_style_datasets = [ + 'CocoDataset', 'PoseTrack18VideoDataset', 'PoseTrack18Dataset' + ] + keypoints_new = np.zeros((keypoints.shape[0], 17, keypoints.shape[2]), + dtype=keypoints.dtype) + if pose_lift_dataset == 'Human36mDataset': + if pose_det_dataset in ['Human36mDataset']: + keypoints_new = keypoints + elif pose_det_dataset in coco_style_datasets: + # pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 11] + keypoints[:, 12]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 5] + keypoints[:, 6]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # in COCO, head is in the middle of l_eye and r_eye + # in PoseTrack18, head is in the middle of head_bottom and head_top + keypoints_new[:, 10] = (keypoints[:, 1] + keypoints[:, 2]) / 2 + # rearrange other keypoints + keypoints_new[:, [1, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [12, 14, 16, 11, 13, 15, 0, 5, 7, 9, 6, 8, 10]] + elif pose_det_dataset in ['AicDataset']: + # pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 9] + keypoints[:, 6]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 3] + keypoints[:, 0]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # neck base (top end of neck) is 1/4 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 + # head (spherical centre of head) is 7/12 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 10] = (5 * keypoints[:, 13] + + 7 * keypoints[:, 12]) / 12 + + keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [6, 7, 8, 9, 10, 11, 3, 4, 5, 0, 1, 2]] + elif pose_det_dataset in ['CrowdPoseDataset']: + # 
pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 6] + keypoints[:, 7]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 0] + keypoints[:, 1]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # neck base (top end of neck) is 1/4 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 + # head (spherical centre of head) is 7/12 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 10] = (5 * keypoints[:, 13] + + 7 * keypoints[:, 12]) / 12 + + keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [7, 9, 11, 6, 8, 10, 0, 2, 4, 1, 3, 5]] + else: + raise NotImplementedError( + f'unsupported conversion between {pose_lift_dataset} and ' + f'{pose_det_dataset}') + + return keypoints_new + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('det_config', help='Config file for detection') + parser.add_argument('det_checkpoint', help='Checkpoint file for detection') + parser.add_argument( + 'pose_estimator_config', + type=str, + default=None, + help='Config file for the 1st stage 2D pose estimator') + parser.add_argument( + 'pose_estimator_checkpoint', + type=str, + default=None, + help='Checkpoint file for the 1st stage 2D pose estimator') + parser.add_argument( + 'pose_lifter_config', + help='Config file for the 2nd stage pose lifter model') + parser.add_argument( + 'pose_lifter_checkpoint', + help='Checkpoint file for the 2nd stage pose lifter model') + parser.add_argument('--input', type=str, default='', help='Video path') + parser.add_argument( + '--show', + action='store_true', + default=False, + help='Whether to show visualizations') + parser.add_argument( + '--rebase-keypoint-height', + action='store_true', + help='Rebase the predicted 3D pose so its lowest keypoint has a 
' + 'height of 0 (landing on the ground). This is useful for ' + 'visualization when the model do not predict the global position ' + 'of the 3D pose.') + parser.add_argument( + '--norm-pose-2d', + action='store_true', + help='Scale the bbox (along with the 2D pose) to the average bbox ' + 'scale of the dataset, and move the bbox (along with the 2D pose) to ' + 'the average bbox center of the dataset. This is useful when bbox ' + 'is small, especially in multi-person scenarios.') + parser.add_argument( + '--output-root', + type=str, + default='', + help='Root of the output video file. ' + 'Default not saving the visualization video.') + parser.add_argument( + '--device', default='cuda:0', help='Device used for inference') + parser.add_argument( + '--det-cat-id', + type=int, + default=0, + help='Category id for bounding box detection model') + parser.add_argument( + '--bbox-thr', + type=float, + default=0.9, + help='Bounding box score threshold') + parser.add_argument('--kpt-thr', type=float, default=0.3) + parser.add_argument( + '--use-oks-tracking', action='store_true', help='Using OKS tracking') + parser.add_argument( + '--tracking-thr', type=float, default=0.3, help='Tracking threshold') + parser.add_argument( + '--thickness', + type=int, + default=1, + help='Link thickness for visualization') + parser.add_argument( + '--radius', + type=int, + default=3, + help='Keypoint radius for visualization') + parser.add_argument( + '--use-multi-frames', + action='store_true', + default=False, + help='whether to use multi frames for inference in the 2D pose' + 'detection stage. 
Default: False.') + + args = parser.parse_args() + return args + + +def get_area(results): + for i, data_sample in enumerate(results): + pred_instance = data_sample.pred_instances.cpu().numpy() + if 'bboxes' in pred_instance: + bboxes = pred_instance.bboxes + results[i].pred_instances.set_field( + np.array([(bbox[2] - bbox[0]) * (bbox[3] - bbox[1]) + for bbox in bboxes]), 'areas') + else: + keypoints = pred_instance.keypoints + areas, bboxes = [], [] + for keypoint in keypoints: + xmin = np.min(keypoint[:, 0][keypoint[:, 0] > 0], initial=1e10) + xmax = np.max(keypoint[:, 0]) + ymin = np.min(keypoint[:, 1][keypoint[:, 1] > 0], initial=1e10) + ymax = np.max(keypoint[:, 1]) + areas.append((xmax - xmin) * (ymax - ymin)) + bboxes.append([xmin, ymin, xmax, ymax]) + results[i].pred_instances.areas = np.array(areas) + results[i].pred_instances.bboxes = np.array(bboxes) + return results + + +def main(): + assert has_mmdet, 'Please install mmdet to run the demo.' + + args = parse_args() + + assert args.show or (args.output_root != '') + assert args.input != '' + assert args.det_config is not None + assert args.det_checkpoint is not None + + detector = init_detector( + args.det_config, args.det_checkpoint, device=args.device.lower()) + detector.cfg = adapt_mmdet_pipeline(detector.cfg) + + pose_estimator = init_model( + args.pose_estimator_config, + args.pose_estimator_checkpoint, + device=args.device.lower()) + + assert isinstance(pose_estimator, TopdownPoseEstimator), 'Only "TopDown"' \ + 'model is supported for the 1st stage (2D pose detection)' + + det_kpt_color = pose_estimator.dataset_meta.get('keypoint_colors', None) + det_dataset_skeleton = pose_estimator.dataset_meta.get( + 'skeleton_links', None) + det_dataset_link_color = pose_estimator.dataset_meta.get( + 'skeleton_link_colors', None) + + # frame index offsets for inference, used in multi-frame inference setting + if args.use_multi_frames: + assert 'frame_indices' in pose_estimator.cfg.test_dataloader.dataset + 
indices = pose_estimator.cfg.test_dataloader.dataset[ + 'frame_indices_test'] + + pose_det_dataset = pose_estimator.cfg.test_dataloader.dataset + + pose_lifter = init_model( + args.pose_lifter_config, + args.pose_lifter_checkpoint, + device=args.device.lower()) + + assert isinstance(pose_lifter, PoseLifter), \ + 'Only "PoseLifter" model is supported for the 2nd stage ' \ + '(2D-to-3D lifting)' + pose_lift_dataset = pose_lifter.cfg.test_dataloader.dataset + + pose_lifter.cfg.visualizer.radius = args.radius + pose_lifter.cfg.visualizer.line_width = args.thickness + local_visualizer = VISUALIZERS.build(pose_lifter.cfg.visualizer) + + # the dataset_meta is loaded from the checkpoint + local_visualizer.set_dataset_meta(pose_lifter.dataset_meta) + + init_default_scope(pose_lifter.cfg.get('default_scope', 'mmpose')) + + if args.output_root == '': + save_out_video = False + else: + os.makedirs(args.output_root, exist_ok=True) + save_out_video = True + + if save_out_video: + fourcc = cv2.VideoWriter_fourcc(*'mp4v') + video_writer = None + + pose_est_results_list = [] + next_id = 0 + pose_est_results = [] + + video = cv2.VideoCapture(args.input) + assert video.isOpened(), f'Failed to load video file {args.input}' + + (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.') + if int(major_ver) < 3: + fps = video.get(cv2.cv.CV_CAP_PROP_FPS) + width = video.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) + height = video.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) + else: + fps = video.get(cv2.CAP_PROP_FPS) + width = video.get(cv2.CAP_PROP_FRAME_WIDTH) + height = video.get(cv2.CAP_PROP_FRAME_HEIGHT) + + frame_idx = -1 + + while video.isOpened(): + success, frame = video.read() + frame_idx += 1 + + if not success: + break + + pose_est_results_last = pose_est_results + + # First stage: 2D pose detection + # test a single image, the resulting box is (x1, y1, x2, y2) + det_result = inference_detector(detector, frame) + pred_instance = det_result.pred_instances.cpu().numpy() + + bboxes = 
pred_instance.bboxes + bboxes = bboxes[np.logical_and(pred_instance.labels == args.det_cat_id, + pred_instance.scores > args.bbox_thr)] + + if args.use_multi_frames: + frames = collect_multi_frames(video, frame_idx, indices, + args.online) + + # make person results for current image + pose_est_results = inference_topdown( + pose_estimator, frames if args.use_multi_frames else frame, bboxes) + + pose_est_results = get_area(pose_est_results) + if args.use_oks_tracking: + _track = partial(_track_by_oks) + else: + _track = _track_by_iou + + for i, result in enumerate(pose_est_results): + track_id, pose_est_results_last, match_result = _track( + result, pose_est_results_last, args.tracking_thr) + if track_id == -1: + pred_instances = result.pred_instances.cpu().numpy() + keypoints = pred_instances.keypoints + if np.count_nonzero(keypoints[:, :, 1]) >= 3: + pose_est_results[i].set_field(next_id, 'track_id') + next_id += 1 + else: + # If the number of keypoints detected is small, + # delete that person instance. 
+ keypoints[:, :, 1] = -10 + pose_est_results[i].pred_instances.set_field( + keypoints, 'keypoints') + bboxes = pred_instances.bboxes * 0 + pose_est_results[i].pred_instances.set_field( + bboxes, 'bboxes') + pose_est_results[i].set_field(-1, 'track_id') + pose_est_results[i].set_field(pred_instances, + 'pred_instances') + else: + pose_est_results[i].set_field(track_id, 'track_id') + + del match_result + + pose_est_results_converted = [] + for pose_est_result in pose_est_results: + pose_est_result_converted = PoseDataSample() + gt_instances = InstanceData() + pred_instances = InstanceData() + for k in pose_est_result.gt_instances.keys(): + gt_instances.set_field(pose_est_result.gt_instances[k], k) + for k in pose_est_result.pred_instances.keys(): + pred_instances.set_field(pose_est_result.pred_instances[k], k) + pose_est_result_converted.gt_instances = gt_instances + pose_est_result_converted.pred_instances = pred_instances + pose_est_result_converted.track_id = pose_est_result.track_id + pose_est_results_converted.append(pose_est_result_converted) + + for i, result in enumerate(pose_est_results_converted): + keypoints = result.pred_instances.keypoints + keypoints = convert_keypoint_definition(keypoints, + pose_det_dataset['type'], + pose_lift_dataset['type']) + pose_est_results_converted[i].pred_instances.keypoints = keypoints + + pose_est_results_list.append(pose_est_results_converted.copy()) + + # extract and pad input pose2d sequence + pose_results_2d = extract_pose_sequence( + pose_est_results_list, + frame_idx=frame_idx, + causal=pose_lift_dataset.get('causal', False), + seq_len=pose_lift_dataset.get('seq_len', 1), + step=pose_lift_dataset.get('seq_step', 1)) + + # Second stage: Pose lifting + # 2D-to-3D pose lifting + pose_lift_results = inference_pose_lifter_model( + pose_lifter, + pose_results_2d, + image_size=(width, height), + norm_pose_2d=args.norm_pose_2d) + + # Pose processing + for idx, pose_lift_res in enumerate(pose_lift_results): + gt_instances = 
pose_lift_res.gt_instances + + pose_lift_res.track_id = pose_est_results_converted[i].get( + 'track_id', 1e4) + + pred_instances = pose_lift_res.pred_instances + keypoints = pred_instances.keypoints + + keypoints = keypoints[..., [0, 2, 1]] + keypoints[..., 0] = -keypoints[..., 0] + keypoints[..., 2] = -keypoints[..., 2] + + # rebase height (z-axis) + if args.rebase_keypoint_height: + keypoints[..., 2] -= np.min( + keypoints[..., 2], axis=-1, keepdims=True) + + pose_lift_results[i].pred_instances.keypoints = keypoints + + pose_lift_results = sorted( + pose_lift_results, key=lambda x: x.get('track_id', 1e4)) + + pred_3d_data_samples = merge_data_samples(pose_lift_results) + + # Visualization + frame = mmcv.bgr2rgb(frame) + + det_data_sample = merge_data_samples(pose_est_results) + + local_visualizer.add_datasample( + 'result', + frame, + data_sample=pred_3d_data_samples, + det_data_sample=det_data_sample, + draw_gt=False, + det_kpt_color=det_kpt_color, + det_dataset_skeleton=det_dataset_skeleton, + det_dataset_link_color=det_dataset_link_color, + show=args.show, + draw_bbox=True, + kpt_thr=args.kpt_thr, + wait_time=0.001) + + frame_vis = local_visualizer.get_image() + + if save_out_video: + if video_writer is None: + # the size of the image with visualization may vary + # depending on the presence of heatmaps + video_writer = cv2.VideoWriter( + osp.join(args.output_root, + f'vis_{osp.basename(args.input)}'), fourcc, fps, + (frame_vis.shape[1], frame_vis.shape[0])) + + video_writer.write(mmcv.rgb2bgr(frame_vis)) + + video.release() + + if video_writer: + video_writer.release() + + +if __name__ == '__main__': + main() diff --git a/demo/docs/3d_human_pose_demo.md b/demo/docs/3d_human_pose_demo.md new file mode 100644 index 0000000000..eb2eab92ae --- /dev/null +++ b/demo/docs/3d_human_pose_demo.md @@ -0,0 +1,74 @@ +## 3D Human Pose Demo + +
+ +### 3D Human Pose Two-stage Estimation Video Demo + +#### Using mmdet for human bounding box detection and top-down model for the 1st stage (2D pose detection), and inference the 2nd stage (2D-to-3D lifting) + +Assume that you have already installed [mmdet](https://github.com/open-mmlab/mmdetection). + +```shell +python demo/body3d_pose_lifter_demo.py \ +${MMDET_CONFIG_FILE} \ +${MMDET_CHECKPOINT_FILE} \ +${MMPOSE_CONFIG_FILE_2D} \ +${MMPOSE_CHECKPOINT_FILE_2D} \ +${MMPOSE_CONFIG_FILE_3D} \ +${MMPOSE_CHECKPOINT_FILE_3D} \ +--input ${VIDEO_PATH} \ +[--show] \ +[--rebase-keypoint-height] \ +[--norm-pose-2d] \ +[--output-root ${OUT_VIDEO_ROOT}] \ +[--device ${GPU_ID or CPU}] \ +[--det-cat-id DET_CAT_ID] \ +[--bbox-thr BBOX_THR] \ +[--kpt-thr KPT_THR] \ +[--use-oks-tracking] \ +[--tracking-thr TRACKING_THR] \ +[--thickness THICKNESS] \ +[--radius RADIUS] \ +[--use-multi-frames] [--online] +``` + +Note that + +1. `${VIDEO_PATH}` can be the local path or **URL** link to video file. + +2. You can turn on the `[--use-multi-frames]` option to use multi frames for inference in the 2D pose detection stage. + +3. If the `[--online]` option is set to **True**, future frame information can **not** be used when using multi frames for inference in the 2D pose detection stage. 
+ +Examples: + +During 2D pose detection, for single-frame inference that do not rely on extra frames to get the final results of the current frame, try this: + +```shell +python demo/body3d_pose_lifter_demo.py \ +demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py \ +https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \ +configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192.py \ +https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_coco_256x192-b9e0b3ab_20200708.pth \ +configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ +https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ +--input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ +--output-root vis_results \ +--rebase-keypoint-height +``` + +During 2D pose detection, for multi-frame inference that rely on extra frames to get the final results of the current frame, try this: + +```shell +python demo/body3d_pose_lifter_demo.py \ +demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py \ +https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \ +configs/body_2d_keypoint/topdown_heatmap/posetrack18/td-hm_hrnet-w48_8xb64-20e_posetrack18-384x288.py \ +https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_posetrack18_384x288-5fd6d3ff_20211130.pth \ +configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ +https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ +--input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ +--output-root vis_results \ 
+--rebase-keypoint-height \ +--use-multi-frames --online +``` diff --git a/mmpose/apis/__init__.py b/mmpose/apis/__init__.py index ff7149e453..dcce33742c 100644 --- a/mmpose/apis/__init__.py +++ b/mmpose/apis/__init__.py @@ -1,8 +1,13 @@ # Copyright (c) OpenMMLab. All rights reserved. -from .inference import inference_bottomup, inference_topdown, init_model +from .inference import (collect_multi_frames, inference_bottomup, + inference_topdown, init_model) +from .inference_3d import extract_pose_sequence, inference_pose_lifter_model +from .inference_tracking import _compute_iou, _track_by_iou, _track_by_oks from .inferencers import MMPoseInferencer, Pose2DInferencer __all__ = [ 'init_model', 'inference_topdown', 'inference_bottomup', - 'Pose2DInferencer', 'MMPoseInferencer' + 'collect_multi_frames', 'Pose2DInferencer', 'MMPoseInferencer', + '_track_by_iou', '_track_by_oks', '_compute_iou', + 'inference_pose_lifter_model', 'extract_pose_sequence' ] diff --git a/mmpose/apis/inference.py b/mmpose/apis/inference.py index 6763d318d5..7f733fff45 100644 --- a/mmpose/apis/inference.py +++ b/mmpose/apis/inference.py @@ -223,3 +223,36 @@ def inference_bottomup(model: nn.Module, img: Union[np.ndarray, str]): results = model.test_step(batch) return results + + +def collect_multi_frames(video, frame_id, indices, online=False): + """Collect multi frames from the video. + + Args: + video (mmcv.VideoReader): A VideoReader of the input video file. + frame_id (int): index of the current frame + indices (list(int)): index offsets of the frames to collect + online (bool): inference mode, if set to True, can not use future + frame information. + + Returns: + list(ndarray): multi frames collected from the input video file. 
+ """ + num_frames = len(video) + frames = [] + # put the current frame at first + frames.append(video[frame_id]) + # use multi frames for inference + for idx in indices: + # skip current frame + if idx == 0: + continue + support_idx = frame_id + idx + # online mode, can not use future frame information + if online: + support_idx = np.clip(support_idx, 0, frame_id) + else: + support_idx = np.clip(support_idx, 0, num_frames - 1) + frames.append(video[support_idx]) + + return frames diff --git a/mmpose/apis/inference_3d.py b/mmpose/apis/inference_3d.py new file mode 100644 index 0000000000..5fbc934adc --- /dev/null +++ b/mmpose/apis/inference_3d.py @@ -0,0 +1,255 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import numpy as np +import torch +from mmengine.dataset import Compose, pseudo_collate +from mmengine.registry import init_default_scope +from mmengine.structures import InstanceData + +from mmpose.structures import PoseDataSample + + +def extract_pose_sequence(pose_results, frame_idx, causal, seq_len, step=1): + """Extract the target frame from 2D pose results, and pad the sequence to a + fixed length. + + Args: + pose_results (List[List[:obj:`PoseDataSample`]]): Multi-frame pose + detection results stored in a list. + frame_idx (int): The index of the frame in the original video. + causal (bool): If True, the target frame is the last frame in + a sequence. Otherwise, the target frame is in the middle of + a sequence. + seq_len (int): The number of frames in the input sequence. + step (int): Step size to extract frames from the video. + + Returns: + List[List[:obj:`PoseDataSample`]]: Multi-frame pose detection results + stored in a nested list with a length of seq_len. 
+ """ + if causal: + frames_left = seq_len - 1 + frames_right = 0 + else: + frames_left = (seq_len - 1) // 2 + frames_right = frames_left + num_frames = len(pose_results) + + # get the padded sequence + pad_left = max(0, frames_left - frame_idx // step) + pad_right = max(0, frames_right - (num_frames - 1 - frame_idx) // step) + start = max(frame_idx % step, frame_idx - frames_left * step) + end = min(num_frames - (num_frames - 1 - frame_idx) % step, + frame_idx + frames_right * step + 1) + pose_results_seq = [pose_results[0]] * pad_left + \ + pose_results[start:end:step] + [pose_results[-1]] * pad_right + return pose_results_seq + + +def _collate_pose_sequence(pose_results_2d, + with_track_id=True, + target_frame=-1): + """Reorganize multi-frame pose detection results into individual pose + sequences. + + Note: + - The temporal length of the pose detection results: T + - The number of the person instances: N + - The number of the keypoints: K + - The channel number of each keypoint: C + + Args: + pose_results_2d (List[List[:obj:`PoseDataSample`]]): Multi-frame pose + detection results stored in a nested list. Each element of the + outer list is the pose detection results of a single frame, and + each element of the inner list is the pose information of one + person, which contains: + + - keypoints (ndarray[K, 2 or 3]): x, y, [score] + - track_id (int): unique id of each person, required when + ``with_track_id==True``` + + with_track_id (bool): If True, the element in pose_results is expected + to contain "track_id", which will be used to gather the pose + sequence of a person from multiple frames. Otherwise, the pose + results in each frame are expected to have a consistent number and + order of identities. Default is True. + target_frame (int): The index of the target frame. Default: -1. + + Returns: + List[:obj:`PoseDataSample`]: Indivisual pose sequence in with length N. 
+ """ + T = len(pose_results_2d) + assert T > 0 + + target_frame = (T + target_frame) % T # convert negative index to positive + + N = len( + pose_results_2d[target_frame]) # use identities in the target frame + if N == 0: + return [] + + B, K, C = pose_results_2d[target_frame][0].pred_instances.keypoints.shape + + track_ids = None + if with_track_id: + track_ids = [res.track_id for res in pose_results_2d[target_frame]] + + pose_sequences = [] + for idx in range(N): + pose_seq = PoseDataSample() + gt_instances = InstanceData() + pred_instances = InstanceData() + + for k in pose_results_2d[target_frame][idx].gt_instances.keys(): + gt_instances.set_field( + pose_results_2d[target_frame][idx].gt_instances[k], k) + for k in pose_results_2d[target_frame][idx].pred_instances.keys(): + if k != 'keypoints': + pred_instances.set_field( + pose_results_2d[target_frame][idx].pred_instances[k], k) + pose_seq.pred_instances = pred_instances + pose_seq.gt_instances = gt_instances + + if not with_track_id: + pose_seq.pred_instances.keypoints = np.stack([ + frame[idx].pred_instances.keypoints + for frame in pose_results_2d + ], + axis=1) + else: + keypoints = np.zeros((B, T, K, C), dtype=np.float32) + keypoints[:, target_frame] = pose_results_2d[target_frame][ + idx].pred_instances.keypoints + # find the left most frame containing track_ids[idx] + for frame_idx in range(target_frame - 1, -1, -1): + contains_idx = False + for res in pose_results_2d[frame_idx]: + if res.track_id == track_ids[idx]: + keypoints[:, frame_idx] = res.pred_instances.keypoints + contains_idx = True + break + if not contains_idx: + # replicate the left most frame + keypoints[:, :frame_idx + 1] = keypoints[:, frame_idx + 1] + break + # find the right most frame containing track_idx[idx] + for frame_idx in range(target_frame + 1, T): + contains_idx = False + for res in pose_results_2d[frame_idx]: + if res.track_id == track_ids[idx]: + keypoints[:, frame_idx] = res.pred_instances.keypoints + contains_idx = True 
+ break + if not contains_idx: + # replicate the right most frame + keypoints[:, frame_idx + 1:] = keypoints[:, frame_idx] + break + pose_seq.pred_instances.keypoints = keypoints + pose_sequences.append(pose_seq) + + return pose_sequences + + +def inference_pose_lifter_model(model, + pose_results_2d, + with_track_id=True, + image_size=None, + norm_pose_2d=False): + """Inference 3D pose from 2D pose sequences using a pose lifter model. + + Args: + model (nn.Module): The loaded pose lifter model + pose_results_2d (List[List[:obj:`PoseDataSample`]]): The 2D pose + sequences stored in a nested list. + with_track_id: If True, the element in pose_results_2d is expected to + contain "track_id", which will be used to gather the pose sequence + of a person from multiple frames. Otherwise, the pose results in + each frame are expected to have a consistent number and order of + identities. Default is True. + image_size (tuple|list): image width, image height. If None, image size + will not be contained in dict ``data``. + norm_pose_2d (bool): If True, scale the bbox (along with the 2D + pose) to the average bbox scale of the dataset, and move the bbox + (along with the 2D pose) to the average bbox center of the dataset. + + Returns: + List[:obj:`PoseDataSample`]: 3D pose inference results. Specifically, + the predicted keypoints and scores are saved at + ``data_sample.pred_instances.keypoints_3d``. 
+ """ + init_default_scope(model.cfg.get('default_scope', 'mmpose')) + pipeline = Compose(model.cfg.test_dataloader.dataset.pipeline) + + causal = model.cfg.test_dataloader.dataset.get('causal', False) + target_idx = -1 if causal else len(pose_results_2d) // 2 + + dataset_info = model.dataset_meta + if dataset_info is not None: + if 'stats_info' in dataset_info: + bbox_center = dataset_info['stats_info']['bbox_center'] + bbox_scale = dataset_info['stats_info']['bbox_scale'] + else: + bbox_center = None + bbox_scale = None + + for i, pose_res in enumerate(pose_results_2d): + for j, data_sample in enumerate(pose_res): + kpts = data_sample.pred_instances.keypoints + bboxes = data_sample.pred_instances.bboxes + keypoints = [] + for k in range(len(kpts)): + kpt = kpts[k] + if norm_pose_2d: + bbox = bboxes[k] + center = np.array([[(bbox[0] + bbox[2]) / 2, + (bbox[1] + bbox[3]) / 2]]) + scale = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) + keypoints.append((kpt[:, :2] - center) / scale * + bbox_scale + bbox_center) + else: + keypoints.append(kpt[:, :2]) + pose_results_2d[i][j].pred_instances.keypoints = np.array( + keypoints) + + pose_sequences_2d = _collate_pose_sequence(pose_results_2d, with_track_id, + target_idx) + + if not pose_sequences_2d: + return [] + + data_list = [] + for i, pose_seq in enumerate(pose_sequences_2d): + data_info = dict() + + keypoints_2d = pose_seq.pred_instances.keypoints + keypoints_2d = np.squeeze( + keypoints_2d, axis=0) if keypoints_2d.ndim == 4 else keypoints_2d + + T, K, C = keypoints_2d.shape + + data_info['keypoints'] = keypoints_2d + data_info['keypoints_visible'] = np.ones(( + T, + K, + ), dtype=np.float32) + data_info['lifting_target'] = np.zeros((K, 3), dtype=np.float32) + data_info['lifting_target_visible'] = np.ones((K, 1), dtype=np.float32) + + if image_size is not None: + assert len(image_size) == 2 + data_info['camera_param'] = dict(w=image_size[0], h=image_size[1]) + + data_info.update(model.dataset_meta) + 
data_list.append(pipeline(data_info)) + + if data_list: + # collate data list into a batch, which is a dict with following keys: + # batch['inputs']: a list of input images + # batch['data_samples']: a list of :obj:`PoseDataSample` + batch = pseudo_collate(data_list) + with torch.no_grad(): + results = model.test_step(batch) + else: + results = [] + + return results diff --git a/mmpose/apis/inference_tracking.py b/mmpose/apis/inference_tracking.py new file mode 100644 index 0000000000..c823adcfc7 --- /dev/null +++ b/mmpose/apis/inference_tracking.py @@ -0,0 +1,103 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings + +import numpy as np + +from mmpose.evaluation.functional.nms import oks_iou + + +def _compute_iou(bboxA, bboxB): + """Compute the Intersection over Union (IoU) between two boxes . + + Args: + bboxA (list): The first bbox info (left, top, right, bottom, score). + bboxB (list): The second bbox info (left, top, right, bottom, score). + + Returns: + float: The IoU value. 
+ """ + + x1 = max(bboxA[0], bboxB[0]) + y1 = max(bboxA[1], bboxB[1]) + x2 = min(bboxA[2], bboxB[2]) + y2 = min(bboxA[3], bboxB[3]) + + inter_area = max(0, x2 - x1) * max(0, y2 - y1) + + bboxA_area = (bboxA[2] - bboxA[0]) * (bboxA[3] - bboxA[1]) + bboxB_area = (bboxB[2] - bboxB[0]) * (bboxB[3] - bboxB[1]) + union_area = float(bboxA_area + bboxB_area - inter_area) + if union_area == 0: + union_area = 1e-5 + warnings.warn('union_area=0 is unexpected') + + iou = inter_area / union_area + + return iou + + +def _track_by_iou(res, results_last, thr): + """Get track id using IoU tracking greedily.""" + + bbox = list(np.squeeze(res.pred_instances.bboxes, axis=0)) + + max_iou_score = -1 + max_index = -1 + match_result = {} + for index, res_last in enumerate(results_last): + bbox_last = list(np.squeeze(res_last.pred_instances.bboxes, axis=0)) + + iou_score = _compute_iou(bbox, bbox_last) + if iou_score > max_iou_score: + max_iou_score = iou_score + max_index = index + + if max_iou_score > thr: + track_id = results_last[max_index].track_id + match_result = results_last[max_index] + del results_last[max_index] + else: + track_id = -1 + + return track_id, results_last, match_result + + +def _track_by_oks(res, results_last, thr, sigmas=None): + """Get track id using OKS tracking greedily.""" + keypoint = np.concatenate((res.pred_instances.keypoints, + res.pred_instances.keypoint_scores[:, :, None]), + axis=2) + keypoint = np.squeeze(keypoint, axis=0).reshape((-1)) + area = np.squeeze(res.pred_instances.areas, axis=0) + max_index = -1 + match_result = {} + + if len(results_last) == 0: + return -1, results_last, match_result + + keypoints_last = np.array([ + np.squeeze( + np.concatenate( + (res_last.pred_instances.keypoints, + res_last.pred_instances.keypoint_scores[:, :, None]), + axis=2), + axis=0).reshape((-1)) for res_last in results_last + ]) + area_last = np.array([ + np.squeeze(res_last.pred_instances.areas, axis=0) + for res_last in results_last + ]) + + oks_score = 
oks_iou( + keypoint, keypoints_last, area, area_last, sigmas=sigmas) + + max_index = np.argmax(oks_score) + + if oks_score[max_index] > thr: + track_id = results_last[max_index].track_id + match_result = results_last[max_index] + del results_last[max_index] + else: + track_id = -1 + + return track_id, results_last, match_result diff --git a/mmpose/codecs/__init__.py b/mmpose/codecs/__init__.py index a88ebac701..cdbd8feb0c 100644 --- a/mmpose/codecs/__init__.py +++ b/mmpose/codecs/__init__.py @@ -1,6 +1,7 @@ # Copyright (c) OpenMMLab. All rights reserved. from .associative_embedding import AssociativeEmbedding from .decoupled_heatmap import DecoupledHeatmap +from .image_pose_lifting import ImagePoseLifting from .integral_regression_label import IntegralRegressionLabel from .megvii_heatmap import MegviiHeatmap from .msra_heatmap import MSRAHeatmap @@ -8,9 +9,10 @@ from .simcc_label import SimCCLabel from .spr import SPR from .udp_heatmap import UDPHeatmap +from .video_pose_lifting import VideoPoseLifting __all__ = [ 'MSRAHeatmap', 'MegviiHeatmap', 'UDPHeatmap', 'RegressionLabel', 'SimCCLabel', 'IntegralRegressionLabel', 'AssociativeEmbedding', 'SPR', - 'DecoupledHeatmap' + 'DecoupledHeatmap', 'VideoPoseLifting', 'ImagePoseLifting' ] diff --git a/mmpose/codecs/image_pose_lifting.py b/mmpose/codecs/image_pose_lifting.py new file mode 100644 index 0000000000..64bf925997 --- /dev/null +++ b/mmpose/codecs/image_pose_lifting.py @@ -0,0 +1,203 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Tuple + +import numpy as np + +from mmpose.registry import KEYPOINT_CODECS +from .base import BaseKeypointCodec + + +@KEYPOINT_CODECS.register_module() +class ImagePoseLifting(BaseKeypointCodec): + r"""Generate keypoint coordinates for pose lifter. + + Note: + + - instance number: N + - keypoint number: K + - keypoint dimension: D + - pose-lifitng target dimension: C + + Args: + num_keypoints (int): The number of keypoints in the dataset. 
+ root_index (int): Root keypoint index in the pose. + remove_root (bool): If true, remove the root keypoint from the pose. + Default: ``False``. + save_index (bool): If true, store the root position separated from the + original pose. Default: ``False``. + keypoints_mean (np.ndarray, optional): Mean values of keypoints + coordinates in shape (K, D). + keypoints_std (np.ndarray, optional): Std values of keypoints + coordinates in shape (K, D). + target_mean (np.ndarray, optional): Mean values of pose-lifitng target + coordinates in shape (K, C). + target_std (np.ndarray, optional): Std values of pose-lifitng target + coordinates in shape (K, C). + """ + + auxiliary_encode_keys = {'lifting_target', 'lifting_target_visible'} + + def __init__(self, + num_keypoints: int, + root_index: int, + remove_root: bool = False, + save_index: bool = False, + keypoints_mean: Optional[np.ndarray] = None, + keypoints_std: Optional[np.ndarray] = None, + target_mean: Optional[np.ndarray] = None, + target_std: Optional[np.ndarray] = None): + super().__init__() + + self.num_keypoints = num_keypoints + self.root_index = root_index + self.remove_root = remove_root + self.save_index = save_index + if keypoints_mean is not None and keypoints_std is not None: + assert keypoints_mean.shape == keypoints_std.shape + if target_mean is not None and target_std is not None: + assert target_mean.shape == target_std.shape + self.keypoints_mean = keypoints_mean + self.keypoints_std = keypoints_std + self.target_mean = target_mean + self.target_std = target_std + + def encode(self, + keypoints: np.ndarray, + keypoints_visible: Optional[np.ndarray] = None, + lifting_target: Optional[np.ndarray] = None, + lifting_target_visible: Optional[np.ndarray] = None) -> dict: + """Encoding keypoints from input image space to normalized space. + + Args: + keypoints (np.ndarray): Keypoint coordinates in shape (N, K, D). + keypoints_visible (np.ndarray, optional): Keypoint visibilities in + shape (N, K). 
+ lifting_target (np.ndarray, optional): 3d target coordinate in + shape (K, C). + lifting_target_visible (np.ndarray, optional): Target coordinate in + shape (K, ). + + Returns: + encoded (dict): Contains the following items: + + - keypoint_labels (np.ndarray): The processed keypoints in + shape (K * D, N) where D is 2 for 2d coordinates. + - lifting_target_label: The processed target coordinate in + shape (K, C) or (K-1, C). + - lifting_target_weights (np.ndarray): The target weights in + shape (K, ) or (K-1, ). + - trajectory_weights (np.ndarray): The trajectory weights in + shape (K, ). + - target_root (np.ndarray): The root coordinate of target in + shape (C, ). + + In addition, there are some optional items it may contain: + + - target_root_removed (bool): Indicate whether the root of + pose lifting target is removed. Added if ``self.remove_root`` + is ``True``. + - target_root_index (int): An integer indicating the index of + root. Added if ``self.remove_root`` and ``self.save_index`` + are ``True``. 
+ """ + if keypoints_visible is None: + keypoints_visible = np.ones(keypoints.shape[:2], dtype=np.float32) + + if lifting_target is None: + lifting_target = keypoints[0] + + # set initial value for `lifting_target_weights` + # and `trajectory_weights` + if lifting_target_visible is None: + lifting_target_visible = np.ones( + lifting_target.shape[:-1], dtype=np.float32) + lifting_target_weights = lifting_target_visible + trajectory_weights = (1 / lifting_target[:, 2]) + else: + valid = lifting_target_visible > 0.5 + lifting_target_weights = np.where(valid, 1., 0.).astype(np.float32) + trajectory_weights = lifting_target_weights + + encoded = dict() + + # Zero-center the target pose around a given root keypoint + assert (lifting_target.ndim >= 2 and + lifting_target.shape[-2] > self.root_index), \ + f'Got invalid joint shape {lifting_target.shape}' + + root = lifting_target[..., self.root_index, :] + lifting_target_label = lifting_target - root + + if self.remove_root: + lifting_target_label = np.delete( + lifting_target_label, self.root_index, axis=-2) + assert lifting_target_weights.ndim in {1, 2} + axis_to_remove = -2 if lifting_target_weights.ndim == 2 else -1 + lifting_target_weights = np.delete( + lifting_target_weights, self.root_index, axis=axis_to_remove) + # Add a flag to avoid latter transforms that rely on the root + # joint or the original joint index + encoded['target_root_removed'] = True + + # Save the root index which is necessary to restore the global pose + if self.save_index: + encoded['target_root_index'] = self.root_index + + # Normalize the 2D keypoint coordinate with mean and std + keypoint_labels = keypoints.copy() + if self.keypoints_mean is not None and self.keypoints_std is not None: + keypoints_shape = keypoints.shape + assert self.keypoints_mean.shape == keypoints_shape[1:] + + keypoint_labels = (keypoint_labels - + self.keypoints_mean) / self.keypoints_std + if self.target_mean is not None and self.target_std is not None: + target_shape 
= lifting_target_label.shape + assert self.target_mean.shape == target_shape + + lifting_target_label = (lifting_target_label - + self.target_mean) / self.target_std + + # Generate reshaped keypoint coordinates + assert keypoint_labels.ndim in {2, 3} + if keypoint_labels.ndim == 2: + keypoint_labels = keypoint_labels[None, ...] + + encoded['keypoint_labels'] = keypoint_labels + encoded['lifting_target_label'] = lifting_target_label + encoded['lifting_target_weights'] = lifting_target_weights + encoded['trajectory_weights'] = trajectory_weights + encoded['target_root'] = root + + return encoded + + def decode(self, + encoded: np.ndarray, + target_root: Optional[np.ndarray] = None + ) -> Tuple[np.ndarray, np.ndarray]: + """Decode keypoint coordinates from normalized space to input image + space. + + Args: + encoded (np.ndarray): Coordinates in shape (N, K, C). + target_root (np.ndarray, optional): The target root coordinate. + Default: ``None``. + + Returns: + keypoints (np.ndarray): Decoded coordinates in shape (N, K, C). + scores (np.ndarray): The keypoint scores in shape (N, K). 
+ """ + keypoints = encoded.copy() + + if self.target_mean is not None and self.target_std is not None: + assert self.target_mean.shape == keypoints.shape[1:] + keypoints = keypoints * self.target_std + self.target_mean + + if target_root.size > 0: + keypoints = keypoints + np.expand_dims(target_root, axis=0) + if self.remove_root: + keypoints = np.insert( + keypoints, self.root_index, target_root, axis=1) + scores = np.ones(keypoints.shape[:-1], dtype=np.float32) + + return keypoints, scores diff --git a/mmpose/codecs/regression_label.py b/mmpose/codecs/regression_label.py index 9ae385d2d9..f79195beb4 100644 --- a/mmpose/codecs/regression_label.py +++ b/mmpose/codecs/regression_label.py @@ -78,7 +78,7 @@ def decode(self, encoded: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: Returns: tuple: - keypoints (np.ndarray): Decoded coordinates in shape (N, K, D) - - socres (np.ndarray): The keypoint scores in shape (N, K). + - scores (np.ndarray): The keypoint scores in shape (N, K). It usually represents the confidence of the keypoint prediction """ diff --git a/mmpose/codecs/video_pose_lifting.py b/mmpose/codecs/video_pose_lifting.py new file mode 100644 index 0000000000..56cf35fa2d --- /dev/null +++ b/mmpose/codecs/video_pose_lifting.py @@ -0,0 +1,202 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +from copy import deepcopy +from typing import Optional, Tuple + +import numpy as np + +from mmpose.registry import KEYPOINT_CODECS +from .base import BaseKeypointCodec + + +@KEYPOINT_CODECS.register_module() +class VideoPoseLifting(BaseKeypointCodec): + r"""Generate keypoint coordinates for pose lifter. + + Note: + + - instance number: N + - keypoint number: K + - keypoint dimension: D + - pose-lifitng target dimension: C + + Args: + num_keypoints (int): The number of keypoints in the dataset. + zero_center: Whether to zero-center the target around root. Default: + ``True``. + root_index (int): Root keypoint index in the pose. Default: 0. 
+ remove_root (bool): If true, remove the root keypoint from the pose. + Default: ``False``. + save_index (bool): If true, store the root position separated from the + original pose, only takes effect if ``remove_root`` is ``True``. + Default: ``False``. + normalize_camera (bool): Whether to normalize camera intrinsics. + Default: ``False``. + """ + + auxiliary_encode_keys = { + 'lifting_target', 'lifting_target_visible', 'camera_param' + } + + def __init__(self, + num_keypoints: int, + zero_center: bool = True, + root_index: int = 0, + remove_root: bool = False, + save_index: bool = False, + normalize_camera: bool = False): + super().__init__() + + self.num_keypoints = num_keypoints + self.zero_center = zero_center + self.root_index = root_index + self.remove_root = remove_root + self.save_index = save_index + self.normalize_camera = normalize_camera + + def encode(self, + keypoints: np.ndarray, + keypoints_visible: Optional[np.ndarray] = None, + lifting_target: Optional[np.ndarray] = None, + lifting_target_visible: Optional[np.ndarray] = None, + camera_param: Optional[dict] = None) -> dict: + """Encoding keypoints from input image space to normalized space. + + Args: + keypoints (np.ndarray): Keypoint coordinates in shape (N, K, D). + keypoints_visible (np.ndarray, optional): Keypoint visibilities in + shape (N, K). + lifting_target (np.ndarray, optional): 3d target coordinate in + shape (K, C). + lifting_target_visible (np.ndarray, optional): Target coordinate in + shape (K, ). + camera_param (dict, optional): The camera parameter dictionary. + + Returns: + encoded (dict): Contains the following items: + + - keypoint_labels (np.ndarray): The processed keypoints in + shape (K * D, N) where D is 2 for 2d coordinates. + - lifting_target_label: The processed target coordinate in + shape (K, C) or (K-1, C). + - lifting_target_weights (np.ndarray): The target weights in + shape (K, ) or (K-1, ). 
+ - trajectory_weights (np.ndarray): The trajectory weights in + shape (K, ). + + In addition, there are some optional items it may contain: + + - target_root (np.ndarray): The root coordinate of target in + shape (C, ). Exists if ``self.zero_center`` is ``True``. + - target_root_removed (bool): Indicate whether the root of + pose-lifitng target is removed. Exists if + ``self.remove_root`` is ``True``. + - target_root_index (int): An integer indicating the index of + root. Exists if ``self.remove_root`` and ``self.save_index`` + are ``True``. + - camera_param (dict): The updated camera parameter dictionary. + Exists if ``self.normalize_camera`` is ``True``. + """ + if keypoints_visible is None: + keypoints_visible = np.ones(keypoints.shape[:2], dtype=np.float32) + + if lifting_target is None: + lifting_target = keypoints[0] + + # set initial value for `lifting_target_weights` + # and `trajectory_weights` + if lifting_target_visible is None: + lifting_target_visible = np.ones( + lifting_target.shape[:-1], dtype=np.float32) + lifting_target_weights = lifting_target_visible + trajectory_weights = (1 / lifting_target[:, 2]) + else: + valid = lifting_target_visible > 0.5 + lifting_target_weights = np.where(valid, 1., 0.).astype(np.float32) + trajectory_weights = lifting_target_weights + + if camera_param is None: + camera_param = dict() + + encoded = dict() + + lifting_target_label = lifting_target.copy() + # Zero-center the target pose around a given root keypoint + if self.zero_center: + assert (lifting_target.ndim >= 2 and + lifting_target.shape[-2] > self.root_index), \ + f'Got invalid joint shape {lifting_target.shape}' + + root = lifting_target[..., self.root_index, :] + lifting_target_label = lifting_target_label - root + encoded['target_root'] = root + + if self.remove_root: + lifting_target_label = np.delete( + lifting_target_label, self.root_index, axis=-2) + assert lifting_target_weights.ndim in {1, 2} + axis_to_remove = -2 if lifting_target_weights.ndim == 2 
else -1 + lifting_target_weights = np.delete( + lifting_target_weights, + self.root_index, + axis=axis_to_remove) + # Add a flag to avoid latter transforms that rely on the root + # joint or the original joint index + encoded['target_root_removed'] = True + + # Save the root index for restoring the global pose + if self.save_index: + encoded['target_root_index'] = self.root_index + + # Normalize the 2D keypoint coordinate with image width and height + _camera_param = deepcopy(camera_param) + assert 'w' in _camera_param and 'h' in _camera_param + center = np.array([0.5 * _camera_param['w'], 0.5 * _camera_param['h']], + dtype=np.float32) + scale = np.array(0.5 * _camera_param['w'], dtype=np.float32) + + keypoint_labels = (keypoints - center) / scale + + assert keypoint_labels.ndim in {2, 3} + if keypoint_labels.ndim == 2: + keypoint_labels = keypoint_labels[None, ...] + + if self.normalize_camera: + assert 'f' in _camera_param and 'c' in _camera_param + _camera_param['f'] = _camera_param['f'] / scale + _camera_param['c'] = (_camera_param['c'] - center[:, None]) / scale + encoded['camera_param'] = _camera_param + + encoded['keypoint_labels'] = keypoint_labels + encoded['lifting_target_label'] = lifting_target_label + encoded['lifting_target_weights'] = lifting_target_weights + encoded['trajectory_weights'] = trajectory_weights + + return encoded + + def decode(self, + encoded: np.ndarray, + target_root: Optional[np.ndarray] = None + ) -> Tuple[np.ndarray, np.ndarray]: + """Decode keypoint coordinates from normalized space to input image + space. + + Args: + encoded (np.ndarray): Coordinates in shape (N, K, C). + target_root (np.ndarray, optional): The pose-lifitng target root + coordinate. Default: ``None``. + + Returns: + keypoints (np.ndarray): Decoded coordinates in shape (N, K, C). + scores (np.ndarray): The keypoint scores in shape (N, K). 
+ """ + keypoints = encoded.copy() + + if target_root.size > 0: + keypoints = keypoints + np.expand_dims(target_root, axis=0) + if self.remove_root: + keypoints = np.insert( + keypoints, self.root_index, target_root, axis=1) + scores = np.ones(keypoints.shape[:-1], dtype=np.float32) + + return keypoints, scores diff --git a/mmpose/datasets/datasets/__init__.py b/mmpose/datasets/datasets/__init__.py index 03a0f493ca..9f5801753f 100644 --- a/mmpose/datasets/datasets/__init__.py +++ b/mmpose/datasets/datasets/__init__.py @@ -2,6 +2,7 @@ from .animal import * # noqa: F401, F403 from .base import * # noqa: F401, F403 from .body import * # noqa: F401, F403 +from .body3d import * # noqa: F401, F403 from .face import * # noqa: F401, F403 from .fashion import * # noqa: F401, F403 from .hand import * # noqa: F401, F403 diff --git a/mmpose/datasets/datasets/base/__init__.py b/mmpose/datasets/datasets/base/__init__.py index 23bb4efb48..810440530e 100644 --- a/mmpose/datasets/datasets/base/__init__.py +++ b/mmpose/datasets/datasets/base/__init__.py @@ -1,4 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. from .base_coco_style_dataset import BaseCocoStyleDataset +from .base_mocap_dataset import BaseMocapDataset -__all__ = ['BaseCocoStyleDataset'] +__all__ = ['BaseCocoStyleDataset', 'BaseMocapDataset'] diff --git a/mmpose/datasets/datasets/base/base_mocap_dataset.py b/mmpose/datasets/datasets/base/base_mocap_dataset.py new file mode 100644 index 0000000000..d671a6ae94 --- /dev/null +++ b/mmpose/datasets/datasets/base/base_mocap_dataset.py @@ -0,0 +1,403 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import os.path as osp
+from copy import deepcopy
+from itertools import filterfalse, groupby
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+import numpy as np
+from mmengine.dataset import BaseDataset, force_full_init
+from mmengine.fileio import exists, get_local_path, load
+from mmengine.utils import is_abs
+from PIL import Image
+
+from mmpose.registry import DATASETS
+from ..utils import parse_pose_metainfo
+
+
+@DATASETS.register_module()
+class BaseMocapDataset(BaseDataset):
+    """Base class for 3d body datasets.
+
+    Args:
+        ann_file (str): Annotation file path. Default: ''.
+        seq_len (int): Number of frames in a sequence. Default: 1.
+        causal (bool): If set to ``True``, the rightmost input frame will be
+            the target frame. Otherwise, the middle input frame will be the
+            target frame. Default: ``True``.
+        subset_frac (float): The fraction to reduce dataset size. If set to 1,
+            the dataset size is not reduced. Default: 1.
+        camera_param_file (str): Cameras' parameters file. Default: ``None``.
+        data_mode (str): Specifies the mode of data samples: ``'topdown'`` or
+            ``'bottomup'``. In ``'topdown'`` mode, each data sample contains
+            one instance; while in ``'bottomup'`` mode, each data sample
+            contains all instances in an image. Default: ``'topdown'``
+        metainfo (dict, optional): Meta information for dataset, such as class
+            information. Default: ``None``.
+        data_root (str, optional): The root directory for ``data_prefix`` and
+            ``ann_file``. Default: ``None``.
+        data_prefix (dict, optional): Prefix for training data.
+            Default: ``dict(img='')``.
+        filter_cfg (dict, optional): Config for filter data. Default: `None`.
+        indices (int or Sequence[int], optional): Support using first few
+            data in annotation file to facilitate training/testing on a smaller
+            dataset. Default: ``None`` which means using all ``data_infos``.
+ serialize_data (bool, optional): Whether to hold memory using + serialized objects, when enabled, data loader workers can use + shared RAM from master process instead of making a copy. + Default: ``True``. + pipeline (list, optional): Processing pipeline. Default: []. + test_mode (bool, optional): ``test_mode=True`` means in test phase. + Default: ``False``. + lazy_init (bool, optional): Whether to load annotation during + instantiation. In some cases, such as visualization, only the meta + information of the dataset is needed, which is not necessary to + load annotation file. ``Basedataset`` can skip load annotations to + save time by set ``lazy_init=False``. Default: ``False``. + max_refetch (int, optional): If ``Basedataset.prepare_data`` get a + None img. The maximum extra number of cycles to get a valid + image. Default: 1000. + """ + + METAINFO: dict = dict() + + def __init__(self, + ann_file: str = '', + seq_len: int = 1, + causal: bool = True, + subset_frac: float = 1.0, + camera_param_file: Optional[str] = None, + data_mode: str = 'topdown', + metainfo: Optional[dict] = None, + data_root: Optional[str] = None, + data_prefix: dict = dict(img=''), + filter_cfg: Optional[dict] = None, + indices: Optional[Union[int, Sequence[int]]] = None, + serialize_data: bool = True, + pipeline: List[Union[dict, Callable]] = [], + test_mode: bool = False, + lazy_init: bool = False, + max_refetch: int = 1000): + + if data_mode not in {'topdown', 'bottomup'}: + raise ValueError( + f'{self.__class__.__name__} got invalid data_mode: ' + f'{data_mode}. Should be "topdown" or "bottomup".') + self.data_mode = data_mode + + _ann_file = ann_file + if not is_abs(_ann_file): + _ann_file = osp.join(data_root, _ann_file) + assert exists(_ann_file), 'Annotation file does not exist.' 
+ with get_local_path(_ann_file) as local_path: + self.ann_data = np.load(local_path) + + self.camera_param_file = camera_param_file + if self.camera_param_file: + if not is_abs(self.camera_param_file): + self.camera_param_file = osp.join(data_root, + self.camera_param_file) + assert exists(self.camera_param_file) + self.camera_param = load(self.camera_param_file) + + self.seq_len = seq_len + self.causal = causal + + assert 0 < subset_frac <= 1, ( + f'Unsupported `subset_frac` {subset_frac}. Supported range ' + 'is (0, 1].') + self.subset_frac = subset_frac + + self.sequence_indices = self.get_sequence_indices() + + super().__init__( + ann_file=ann_file, + metainfo=metainfo, + data_root=data_root, + data_prefix=data_prefix, + filter_cfg=filter_cfg, + indices=indices, + serialize_data=serialize_data, + pipeline=pipeline, + test_mode=test_mode, + lazy_init=lazy_init, + max_refetch=max_refetch) + + @classmethod + def _load_metainfo(cls, metainfo: dict = None) -> dict: + """Collect meta information from the dictionary of meta. + + Args: + metainfo (dict): Raw data of pose meta information. + + Returns: + dict: Parsed meta information. + """ + + if metainfo is None: + metainfo = deepcopy(cls.METAINFO) + + if not isinstance(metainfo, dict): + raise TypeError( + f'metainfo should be a dict, but got {type(metainfo)}') + + # parse pose metainfo if it has been assigned + if metainfo: + metainfo = parse_pose_metainfo(metainfo) + return metainfo + + @force_full_init + def prepare_data(self, idx) -> Any: + """Get data processed by ``self.pipeline``. + + :class:`BaseCocoStyleDataset` overrides this method from + :class:`mmengine.dataset.BaseDataset` to add the metainfo into + the ``data_info`` before it is passed to the pipeline. + + Args: + idx (int): The index of ``data_info``. + + Returns: + Any: Depends on ``self.pipeline``. 
+ """ + data_info = self.get_data_info(idx) + + return self.pipeline(data_info) + + def get_data_info(self, idx: int) -> dict: + """Get data info by index. + + Args: + idx (int): Index of data info. + + Returns: + dict: Data info. + """ + data_info = super().get_data_info(idx) + + # Add metainfo items that are required in the pipeline and the model + metainfo_keys = [ + 'upper_body_ids', 'lower_body_ids', 'flip_pairs', + 'dataset_keypoint_weights', 'flip_indices', 'skeleton_links' + ] + + for key in metainfo_keys: + assert key not in data_info, ( + f'"{key}" is a reserved key for `metainfo`, but already ' + 'exists in the `data_info`.') + + data_info[key] = deepcopy(self._metainfo[key]) + + return data_info + + def load_data_list(self) -> List[dict]: + """Load data list from COCO annotation file or person detection result + file.""" + + instance_list, image_list = self._load_annotations() + + if self.data_mode == 'topdown': + data_list = self._get_topdown_data_infos(instance_list) + else: + data_list = self._get_bottomup_data_infos(instance_list, + image_list) + + return data_list + + def get_img_info(self, img_idx, img_name): + try: + with get_local_path(osp.join(self.data_prefix['img'], + img_name)) as local_path: + im = Image.open(local_path) + w, h = im.size + im.close() + except: # noqa: E722 + return None + + img = { + 'file_name': img_name, + 'height': h, + 'width': w, + 'id': img_idx, + 'img_id': img_idx, + 'img_path': osp.join(self.data_prefix['img'], img_name), + } + return img + + def get_sequence_indices(self) -> List[List[int]]: + """Build sequence indices. + + The default method creates sample indices that each sample is a single + frame (i.e. seq_len=1). Override this method in the subclass to define + how frames are sampled to form data samples. + + Outputs: + sample_indices: the frame indices of each sample. + For a sample, all frames will be treated as an input sequence, + and the ground-truth pose of the last frame will be the target. 
+ """ + sequence_indices = [] + if self.seq_len == 1: + num_imgs = len(self.ann_data['imgname']) + sequence_indices = [[idx] for idx in range(num_imgs)] + else: + raise NotImplementedError('Multi-frame data sample unsupported!') + return sequence_indices + + def _load_annotations(self) -> Tuple[List[dict], List[dict]]: + """Load data from annotations in COCO format.""" + num_keypoints = self.metainfo['num_keypoints'] + + img_names = self.ann_data['imgname'] + num_imgs = len(img_names) + + if 'S' in self.ann_data.keys(): + kpts_3d = self.ann_data['S'] + else: + kpts_3d = np.zeros((num_imgs, num_keypoints, 4), dtype=np.float32) + + if 'part' in self.ann_data.keys(): + kpts_2d = self.ann_data['part'] + else: + kpts_2d = np.zeros((num_imgs, num_keypoints, 3), dtype=np.float32) + + if 'center' in self.ann_data.keys(): + centers = self.ann_data['center'] + else: + centers = np.zeros((num_imgs, 2), dtype=np.float32) + + if 'scale' in self.ann_data.keys(): + scales = self.ann_data['scale'].astype(np.float32) + else: + scales = np.zeros(num_imgs, dtype=np.float32) + + instance_list = [] + image_list = [] + + for idx, frame_ids in enumerate(self.sequence_indices): + assert len(frame_ids) == self.seq_len + + _img_names = img_names[frame_ids] + + _keypoints = kpts_2d[frame_ids].astype(np.float32) + keypoints = _keypoints[..., :2] + keypoints_visible = _keypoints[..., 2] + + _keypoints_3d = kpts_3d[frame_ids].astype(np.float32) + keypoints_3d = _keypoints_3d[..., :3] + keypoints_3d_visible = _keypoints_3d[..., 3] + + target_idx = -1 if self.causal else int(self.seq_len) // 2 + + instance_info = { + 'num_keypoints': num_keypoints, + 'keypoints': keypoints, + 'keypoints_visible': keypoints_visible, + 'keypoints_3d': keypoints_3d, + 'keypoints_3d_visible': keypoints_3d_visible, + 'scale': scales[idx], + 'center': centers[idx].astype(np.float32).reshape(1, -1), + 'id': idx, + 'category_id': 1, + 'iscrowd': 0, + 'img_paths': list(_img_names), + 'img_ids': frame_ids, + 
'lifting_target': keypoints_3d[target_idx], + 'lifting_target_visible': keypoints_3d_visible[target_idx], + 'target_img_path': _img_names[target_idx], + } + + if self.camera_param_file: + _cam_param = self.get_camera_param(_img_names[0]) + instance_info['camera_param'] = _cam_param + + instance_list.append(instance_info) + + for idx, imgname in enumerate(img_names): + img_info = self.get_img_info(idx, imgname) + image_list.append(img_info) + + return instance_list, image_list + + def get_camera_param(self, imgname): + """Get camera parameters of a frame by its image name. + + Override this method to specify how to get camera parameters. + """ + raise NotImplementedError + + @staticmethod + def _is_valid_instance(data_info: Dict) -> bool: + """Check a data info is an instance with valid bbox and keypoint + annotations.""" + # crowd annotation + if 'iscrowd' in data_info and data_info['iscrowd']: + return False + # invalid keypoints + if 'num_keypoints' in data_info and data_info['num_keypoints'] == 0: + return False + # invalid keypoints + if 'keypoints' in data_info: + if np.max(data_info['keypoints']) <= 0: + return False + return True + + def _get_topdown_data_infos(self, instance_list: List[Dict]) -> List[Dict]: + """Organize the data list in top-down mode.""" + # sanitize data samples + data_list_tp = list(filter(self._is_valid_instance, instance_list)) + + return data_list_tp + + def _get_bottomup_data_infos(self, instance_list: List[Dict], + image_list: List[Dict]) -> List[Dict]: + """Organize the data list in bottom-up mode.""" + + # bottom-up data list + data_list_bu = [] + + used_img_ids = set() + + # group instances by img_id + for img_ids, data_infos in groupby(instance_list, + lambda x: x['img_ids']): + for img_id in img_ids: + used_img_ids.add(img_id) + data_infos = list(data_infos) + + # image data + img_paths = data_infos[0]['img_paths'] + data_info_bu = { + 'img_ids': img_ids, + 'img_paths': img_paths, + } + + for key in data_infos[0].keys(): + if 
key not in data_info_bu: + seq = [d[key] for d in data_infos] + if isinstance(seq[0], np.ndarray): + seq = np.concatenate(seq, axis=0) + data_info_bu[key] = seq + + # The segmentation annotation of invalid objects will be used + # to generate valid region mask in the pipeline. + invalid_segs = [] + for data_info_invalid in filterfalse(self._is_valid_instance, + data_infos): + if 'segmentation' in data_info_invalid: + invalid_segs.append(data_info_invalid['segmentation']) + data_info_bu['invalid_segs'] = invalid_segs + + data_list_bu.append(data_info_bu) + + # add images without instance for evaluation + if self.test_mode: + for img_info in image_list: + if img_info['img_id'] not in used_img_ids: + data_info_bu = { + 'img_ids': [img_info['img_id']], + 'img_path': [img_info['img_path']], + 'id': list(), + } + data_list_bu.append(data_info_bu) + + return data_list_bu diff --git a/mmpose/datasets/datasets/body3d/__init__.py b/mmpose/datasets/datasets/body3d/__init__.py new file mode 100644 index 0000000000..d5afeca578 --- /dev/null +++ b/mmpose/datasets/datasets/body3d/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .h36m_dataset import Human36mDataset + +__all__ = ['Human36mDataset'] diff --git a/mmpose/datasets/datasets/body3d/h36m_dataset.py b/mmpose/datasets/datasets/body3d/h36m_dataset.py new file mode 100644 index 0000000000..60094aa254 --- /dev/null +++ b/mmpose/datasets/datasets/body3d/h36m_dataset.py @@ -0,0 +1,259 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from collections import defaultdict +from typing import Callable, List, Optional, Sequence, Tuple, Union + +import numpy as np +from mmengine.fileio import exists, get_local_path +from mmengine.utils import is_abs + +from mmpose.datasets.datasets import BaseMocapDataset +from mmpose.registry import DATASETS + + +@DATASETS.register_module() +class Human36mDataset(BaseMocapDataset): + """Human3.6M dataset for 3D human pose estimation. 
+ + "Human3.6M: Large Scale Datasets and Predictive Methods for 3D Human + Sensing in Natural Environments", TPAMI`2014. + More details can be found in the `paper + `__. + + Human3.6M keypoint indexes:: + + 0: 'root (pelvis)', + 1: 'right_hip', + 2: 'right_knee', + 3: 'right_foot', + 4: 'left_hip', + 5: 'left_knee', + 6: 'left_foot', + 7: 'spine', + 8: 'thorax', + 9: 'neck_base', + 10: 'head', + 11: 'left_shoulder', + 12: 'left_elbow', + 13: 'left_wrist', + 14: 'right_shoulder', + 15: 'right_elbow', + 16: 'right_wrist' + + Args: + ann_file (str): Annotation file path. Default: ''. + seq_len (int): Number of frames in a sequence. Default: 1. + seq_step (int): The interval for extracting frames from the video. + Default: 1. + pad_video_seq (bool): Whether to pad the video so that poses will be + predicted for every frame in the video. Default: ``False``. + causal (bool): If set to ``True``, the rightmost input frame will be + the target frame. Otherwise, the middle input frame will be the + target frame. Default: ``True``. + subset_frac (float): The fraction to reduce dataset size. If set to 1, + the dataset size is not reduced. Default: 1. + keypoint_2d_src (str): Specifies 2D keypoint information options, which + should be one of the following options: + + - ``'gt'``: load from the annotation file + - ``'detection'``: load from a detection + result file of 2D keypoint + - 'pipeline': the information will be generated by the pipeline + + Default: ``'gt'``. + keypoint_2d_det_file (str, optional): The 2D keypoint detection file. + If set, 2d keypoint loaded from this file will be used instead of + ground-truth keypoints. This setting is only when + ``keypoint_2d_src`` is ``'detection'``. Default: ``None``. + camera_param_file (str): Cameras' parameters file. Default: ``None``. + data_mode (str): Specifies the mode of data samples: ``'topdown'`` or + ``'bottomup'``. 
In ``'topdown'`` mode, each data sample contains
+            one instance; while in ``'bottomup'`` mode, each data sample
+            contains all instances in an image. Default: ``'topdown'``
+        metainfo (dict, optional): Meta information for dataset, such as class
+            information. Default: ``None``.
+        data_root (str, optional): The root directory for ``data_prefix`` and
+            ``ann_file``. Default: ``None``.
+        data_prefix (dict, optional): Prefix for training data.
+            Default: ``dict(img='')``.
+        filter_cfg (dict, optional): Config for filter data. Default: `None`.
+        indices (int or Sequence[int], optional): Support using first few
+            data in annotation file to facilitate training/testing on a smaller
+            dataset. Default: ``None`` which means using all ``data_infos``.
+        serialize_data (bool, optional): Whether to hold memory using
+            serialized objects, when enabled, data loader workers can use
+            shared RAM from master process instead of making a copy.
+            Default: ``True``.
+        pipeline (list, optional): Processing pipeline. Default: [].
+        test_mode (bool, optional): ``test_mode=True`` means in test phase.
+            Default: ``False``.
+        lazy_init (bool, optional): Whether to load annotation during
+            instantiation. In some cases, such as visualization, only the meta
+            information of the dataset is needed, which is not necessary to
+            load annotation file. ``Basedataset`` can skip load annotations to
+            save time by setting ``lazy_init=False``. Default: ``False``.
+        max_refetch (int, optional): If ``Basedataset.prepare_data`` get a
+            None img. The maximum extra number of cycles to get a valid
+            image. Default: 1000.
+ """ + + METAINFO: dict = dict(from_file='configs/_base_/datasets/h36m.py') + SUPPORTED_keypoint_2d_src = {'gt', 'detection', 'pipeline'} + + def __init__(self, + ann_file: str = '', + seq_len: int = 1, + seq_step: int = 1, + pad_video_seq: bool = False, + causal: bool = True, + subset_frac: float = 1.0, + keypoint_2d_src: str = 'gt', + keypoint_2d_det_file: Optional[str] = None, + camera_param_file: Optional[str] = None, + data_mode: str = 'topdown', + metainfo: Optional[dict] = None, + data_root: Optional[str] = None, + data_prefix: dict = dict(img=''), + filter_cfg: Optional[dict] = None, + indices: Optional[Union[int, Sequence[int]]] = None, + serialize_data: bool = True, + pipeline: List[Union[dict, Callable]] = [], + test_mode: bool = False, + lazy_init: bool = False, + max_refetch: int = 1000): + # check keypoint_2d_src + self.keypoint_2d_src = keypoint_2d_src + if self.keypoint_2d_src not in self.SUPPORTED_keypoint_2d_src: + raise ValueError( + f'Unsupported `keypoint_2d_src` "{self.keypoint_2d_src}". ' + f'Supported options are {self.SUPPORTED_keypoint_2d_src}') + + if keypoint_2d_det_file: + if not is_abs(keypoint_2d_det_file): + self.keypoint_2d_det_file = osp.join(data_root, + keypoint_2d_det_file) + else: + self.keypoint_2d_det_file = keypoint_2d_det_file + + self.seq_step = seq_step + self.pad_video_seq = pad_video_seq + + super().__init__( + ann_file=ann_file, + seq_len=seq_len, + causal=causal, + subset_frac=subset_frac, + camera_param_file=camera_param_file, + data_mode=data_mode, + metainfo=metainfo, + data_root=data_root, + data_prefix=data_prefix, + filter_cfg=filter_cfg, + indices=indices, + serialize_data=serialize_data, + pipeline=pipeline, + test_mode=test_mode, + lazy_init=lazy_init, + max_refetch=max_refetch) + + def get_sequence_indices(self) -> List[List[int]]: + """Split original videos into sequences and build frame indices. + + This method overrides the default one in the base class. 
+ """ + imgnames = self.ann_data['imgname'] + video_frames = defaultdict(list) + for idx, imgname in enumerate(imgnames): + subj, action, camera = self._parse_h36m_imgname(imgname) + video_frames[(subj, action, camera)].append(idx) + + # build sample indices + sequence_indices = [] + _len = (self.seq_len - 1) * self.seq_step + 1 + _step = self.seq_step + for _, _indices in sorted(video_frames.items()): + n_frame = len(_indices) + + if self.pad_video_seq: + # Pad the sequence so that every frame in the sequence will be + # predicted. + if self.causal: + frames_left = self.seq_len - 1 + frames_right = 0 + else: + frames_left = (self.seq_len - 1) // 2 + frames_right = frames_left + for i in range(n_frame): + pad_left = max(0, frames_left - i // _step) + pad_right = max(0, + frames_right - (n_frame - 1 - i) // _step) + start = max(i % _step, i - frames_left * _step) + end = min(n_frame - (n_frame - 1 - i) % _step, + i + frames_right * _step + 1) + sequence_indices.append([_indices[0]] * pad_left + + _indices[start:end:_step] + + [_indices[-1]] * pad_right) + else: + seqs_from_video = [ + _indices[i:(i + _len):_step] + for i in range(0, n_frame - _len + 1) + ] + sequence_indices.extend(seqs_from_video) + + # reduce dataset size if needed + subset_size = int(len(sequence_indices) * self.subset_frac) + start = np.random.randint(0, len(sequence_indices) - subset_size + 1) + end = start + subset_size + + return sequence_indices[start:end] + + def _load_annotations(self) -> Tuple[List[dict], List[dict]]: + instance_list, image_list = super()._load_annotations() + + h36m_data = self.ann_data + kpts_3d = h36m_data['S'] + + if self.keypoint_2d_src == 'detection': + assert exists(self.keypoint_2d_det_file) + kpts_2d = self._load_keypoint_2d_detection( + self.keypoint_2d_det_file) + assert kpts_2d.shape[0] == kpts_3d.shape[0] + assert kpts_2d.shape[2] == 3 + + for idx, frame_ids in enumerate(self.sequence_indices): + kpt_2d = kpts_2d[frame_ids].astype(np.float32) + keypoints = 
kpt_2d[..., :2] + keypoints_visible = kpt_2d[..., 2] + instance_list[idx].update({ + 'keypoints': + keypoints, + 'keypoints_visible': + keypoints_visible + }) + + return instance_list, image_list + + @staticmethod + def _parse_h36m_imgname(imgname) -> Tuple[str, str, str]: + """Parse imgname to get information of subject, action and camera. + + A typical h36m image filename is like: + S1_Directions_1.54138969_000001.jpg + """ + subj, rest = osp.basename(imgname).split('_', 1) + action, rest = rest.split('.', 1) + camera, rest = rest.split('_', 1) + return subj, action, camera + + def get_camera_param(self, imgname) -> dict: + """Get camera parameters of a frame by its image name.""" + assert hasattr(self, 'camera_param') + subj, _, camera = self._parse_h36m_imgname(imgname) + return self.camera_param[(subj, camera)] + + def _load_keypoint_2d_detection(self, det_file): + """"Load 2D joint detection results from file.""" + with get_local_path(det_file) as local_path: + kpts_2d = np.load(local_path).astype(np.float32) + + return kpts_2d diff --git a/mmpose/datasets/datasets/utils.py b/mmpose/datasets/datasets/utils.py index 5140126163..7433a168b9 100644 --- a/mmpose/datasets/datasets/utils.py +++ b/mmpose/datasets/datasets/utils.py @@ -174,6 +174,11 @@ def parse_pose_metainfo(metainfo: dict): metainfo['joint_weights'], dtype=np.float32) parsed['sigmas'] = np.array(metainfo['sigmas'], dtype=np.float32) + if 'stats_info' in metainfo: + parsed['stats_info'] = {} + for name, val in metainfo['stats_info'].items(): + parsed['stats_info'][name] = np.array(val, dtype=np.float32) + # formatting def _map(src, mapping: dict): if isinstance(src, (list, tuple)): diff --git a/mmpose/datasets/transforms/__init__.py b/mmpose/datasets/transforms/__init__.py index 61dae74b8c..7ccbf7dac2 100644 --- a/mmpose/datasets/transforms/__init__.py +++ b/mmpose/datasets/transforms/__init__.py @@ -8,6 +8,7 @@ from .converting import KeypointConverter from .formatting import PackPoseInputs from 
.loading import LoadImage +from .pose3d_transforms import RandomFlipAroundRoot from .topdown_transforms import TopdownAffine __all__ = [ @@ -15,5 +16,5 @@ 'RandomHalfBody', 'TopdownAffine', 'Albumentation', 'PhotometricDistortion', 'PackPoseInputs', 'LoadImage', 'BottomupGetHeatmapMask', 'BottomupRandomAffine', 'BottomupResize', - 'GenerateTarget', 'KeypointConverter' + 'GenerateTarget', 'KeypointConverter', 'RandomFlipAroundRoot' ] diff --git a/mmpose/datasets/transforms/formatting.py b/mmpose/datasets/transforms/formatting.py index dd9ad522f2..403147120d 100644 --- a/mmpose/datasets/transforms/formatting.py +++ b/mmpose/datasets/transforms/formatting.py @@ -37,6 +37,31 @@ def image_to_tensor(img: Union[np.ndarray, return tensor +def keypoints_to_tensor(keypoints: Union[np.ndarray, Sequence[np.ndarray]] + ) -> torch.torch.Tensor: + """Translate keypoints or sequence of keypoints to tensor. Multiple + keypoints tensors will be stacked. + + Args: + keypoints (np.ndarray | Sequence[np.ndarray]): The keypoints or + keypoints sequence. + + Returns: + torch.Tensor: The output tensor. + """ + if isinstance(keypoints, np.ndarray): + keypoints = np.ascontiguousarray(keypoints) + N = keypoints.shape[0] + keypoints = keypoints.transpose(1, 2, 0).reshape(-1, N) + tensor = torch.from_numpy(keypoints).contiguous() + else: + assert is_seq_of(keypoints, np.ndarray) + tensor = torch.stack( + [keypoints_to_tensor(_keypoints) for _keypoints in keypoints]) + + return tensor + + @TRANSFORMS.register_module() class PackPoseInputs(BaseTransform): """Pack the inputs data for pose estimation. 
@@ -89,6 +114,8 @@ class PackPoseInputs(BaseTransform): 'bbox_score': 'bbox_scores', 'keypoints': 'keypoints', 'keypoints_visible': 'keypoints_visible', + 'lifting_target': 'lifting_target', + 'lifting_target_visible': 'lifting_target_visible', } # items in `label_mapping_table` will be packed into @@ -96,6 +123,9 @@ class PackPoseInputs(BaseTransform): # will be used for computing losses label_mapping_table = { 'keypoint_labels': 'keypoint_labels', + 'lifting_target_label': 'lifting_target_label', + 'lifting_target_weights': 'lifting_target_weights', + 'trajectory_weights': 'trajectory_weights', 'keypoint_x_labels': 'keypoint_x_labels', 'keypoint_y_labels': 'keypoint_y_labels', 'keypoint_weights': 'keypoint_weights', @@ -137,10 +167,17 @@ def transform(self, results: dict) -> dict: - 'data_samples' (obj:`PoseDataSample`): The annotation info of the sample. """ - # Pack image(s) + # Pack image(s) for 2d pose estimation if 'img' in results: img = results['img'] - img_tensor = image_to_tensor(img) + inputs_tensor = image_to_tensor(img) + # Pack keypoints for 3d pose-lifting + elif 'lifting_target' in results and 'keypoints' in results: + if 'keypoint_labels' in results: + keypoints = results['keypoint_labels'] + else: + keypoints = results['keypoints'] + inputs_tensor = keypoints_to_tensor(keypoints) data_sample = PoseDataSample() @@ -148,6 +185,10 @@ def transform(self, results: dict) -> dict: gt_instances = InstanceData() for key, packed_key in self.instance_mapping_table.items(): if key in results: + if 'lifting_target' in results and key in { + 'keypoints', 'keypoints_visible' + }: + continue gt_instances.set_field(results[key], packed_key) # pack `transformed_keypoints` for visualizing data transform @@ -162,6 +203,11 @@ def transform(self, results: dict) -> dict: gt_instance_labels = InstanceData() for key, packed_key in self.label_mapping_table.items(): if key in results: + # For pose-lifting, store only target-related fields + if 'lifting_target_label' in 
results and key in { + 'keypoint_labels', 'keypoint_weights' + }: + continue if isinstance(results[key], list): # A list of labels is usually generated by combined # multiple encoders (See ``GenerateTarget`` in @@ -202,7 +248,7 @@ def transform(self, results: dict) -> dict: data_sample.set_metainfo(img_meta) packed_results = dict() - packed_results['inputs'] = img_tensor + packed_results['inputs'] = inputs_tensor packed_results['data_samples'] = data_sample return packed_results diff --git a/mmpose/datasets/transforms/pose3d_transforms.py b/mmpose/datasets/transforms/pose3d_transforms.py new file mode 100644 index 0000000000..e6559fa398 --- /dev/null +++ b/mmpose/datasets/transforms/pose3d_transforms.py @@ -0,0 +1,105 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy +from typing import Dict + +import numpy as np +from mmcv.transforms import BaseTransform + +from mmpose.registry import TRANSFORMS +from mmpose.structures.keypoint import flip_keypoints_custom_center + + +@TRANSFORMS.register_module() +class RandomFlipAroundRoot(BaseTransform): + """Data augmentation with random horizontal joint flip around a root joint. + + Args: + keypoints_flip_cfg (dict): Configurations of the + ``flip_keypoints_custom_center`` function for ``keypoints``. Please + refer to the docstring of the ``flip_keypoints_custom_center`` + function for more details. + target_flip_cfg (dict): Configurations of the + ``flip_keypoints_custom_center`` function for ``lifting_target``. + Please refer to the docstring of the + ``flip_keypoints_custom_center`` function for more details. + flip_prob (float): Probability of flip. Default: 0.5. + flip_camera (bool): Whether to flip horizontal distortion coefficients. + Default: ``False``. 
+ + Required keys: + keypoints + lifting_target + + Modified keys: + (keypoints, keypoints_visible, lifting_target, lifting_target_visible, + camera_param) + """ + + def __init__(self, + keypoints_flip_cfg, + target_flip_cfg, + flip_prob=0.5, + flip_camera=False): + self.keypoints_flip_cfg = keypoints_flip_cfg + self.target_flip_cfg = target_flip_cfg + self.flip_prob = flip_prob + self.flip_camera = flip_camera + + def transform(self, results: Dict) -> dict: + """The transform function of :class:`ZeroCenterPose`. + + See ``transform()`` method of :class:`BaseTransform` for details. + + Args: + results (dict): The result dict + + Returns: + dict: The result dict. + """ + + keypoints = results['keypoints'] + if 'keypoints_visible' in results: + keypoints_visible = results['keypoints_visible'] + else: + keypoints_visible = np.ones(keypoints.shape[:-1], dtype=np.float32) + lifting_target = results['lifting_target'] + if 'lifting_target_visible' in results: + lifting_target_visible = results['lifting_target_visible'] + else: + lifting_target_visible = np.ones( + lifting_target.shape[:-1], dtype=np.float32) + + if np.random.rand() <= self.flip_prob: + if 'flip_indices' not in results: + flip_indices = list(range(self.num_keypoints)) + else: + flip_indices = results['flip_indices'] + + # flip joint coordinates + keypoints, keypoints_visible = flip_keypoints_custom_center( + keypoints, keypoints_visible, flip_indices, + **self.keypoints_flip_cfg) + lifting_target, lifting_target_visible = flip_keypoints_custom_center( # noqa + lifting_target, lifting_target_visible, flip_indices, + **self.target_flip_cfg) + + results['keypoints'] = keypoints + results['keypoints_visible'] = keypoints_visible + results['lifting_target'] = lifting_target + results['lifting_target_visible'] = lifting_target_visible + + # flip horizontal distortion coefficients + if self.flip_camera: + assert 'camera_param' in results, \ + 'Camera parameters are missing.' 
+ _camera_param = deepcopy(results['camera_param']) + + assert 'c' in _camera_param + _camera_param['c'][0] *= -1 + + if 'p' in _camera_param: + _camera_param['p'][0] *= -1 + + results['camera_param'].update(_camera_param) + + return results diff --git a/mmpose/evaluation/functional/__init__.py b/mmpose/evaluation/functional/__init__.py index 2c4a8b5d1e..49f243163c 100644 --- a/mmpose/evaluation/functional/__init__.py +++ b/mmpose/evaluation/functional/__init__.py @@ -1,6 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. -from .keypoint_eval import (keypoint_auc, keypoint_epe, keypoint_nme, - keypoint_pck_accuracy, +from .keypoint_eval import (keypoint_auc, keypoint_epe, keypoint_mpjpe, + keypoint_nme, keypoint_pck_accuracy, multilabel_classification_accuracy, pose_pck_accuracy, simcc_pck_accuracy) from .nms import nms, oks_nms, soft_oks_nms @@ -8,5 +8,5 @@ __all__ = [ 'keypoint_pck_accuracy', 'keypoint_auc', 'keypoint_nme', 'keypoint_epe', 'pose_pck_accuracy', 'multilabel_classification_accuracy', - 'simcc_pck_accuracy', 'nms', 'oks_nms', 'soft_oks_nms' + 'simcc_pck_accuracy', 'nms', 'oks_nms', 'soft_oks_nms', 'keypoint_mpjpe' ] diff --git a/mmpose/evaluation/functional/keypoint_eval.py b/mmpose/evaluation/functional/keypoint_eval.py index 060243357b..3c689f3b00 100644 --- a/mmpose/evaluation/functional/keypoint_eval.py +++ b/mmpose/evaluation/functional/keypoint_eval.py @@ -4,6 +4,7 @@ import numpy as np from mmpose.codecs.utils import get_heatmap_maximum, get_simcc_maximum +from .mesh_eval import compute_similarity_transform def _calc_distances(preds: np.ndarray, gts: np.ndarray, mask: np.ndarray, @@ -318,3 +319,57 @@ def multilabel_classification_accuracy(pred: np.ndarray, # only if it's correct for all labels. 
acc = (((pred - thr) * (gt - thr)) > 0).all(axis=1).mean() return acc + + +def keypoint_mpjpe(pred: np.ndarray, + gt: np.ndarray, + mask: np.ndarray, + alignment: str = 'none'): + """Calculate the mean per-joint position error (MPJPE) and the error after + rigid alignment with the ground truth (P-MPJPE). + + Note: + - batch_size: N + - num_keypoints: K + - keypoint_dims: C + + Args: + pred (np.ndarray): Predicted keypoint location with shape [N, K, C]. + gt (np.ndarray): Groundtruth keypoint location with shape [N, K, C]. + mask (np.ndarray): Visibility of the target with shape [N, K]. + False for invisible joints, and True for visible. + Invisible joints will be ignored for accuracy calculation. + alignment (str, optional): method to align the prediction with the + groundtruth. Supported options are: + + - ``'none'``: no alignment will be applied + - ``'scale'``: align in the least-square sense in scale + - ``'procrustes'``: align in the least-square sense in + scale, rotation and translation. + + Returns: + tuple: A tuple containing joint position errors + + - (float | np.ndarray): mean per-joint position error (mpjpe). + - (float | np.ndarray): mpjpe after rigid alignment with the + ground truth (p-mpjpe). 
+ """ + assert mask.any() + + if alignment == 'none': + pass + elif alignment == 'procrustes': + pred = np.stack([ + compute_similarity_transform(pred_i, gt_i) + for pred_i, gt_i in zip(pred, gt) + ]) + elif alignment == 'scale': + pred_dot_pred = np.einsum('nkc,nkc->n', pred, pred) + pred_dot_gt = np.einsum('nkc,nkc->n', pred, gt) + scale_factor = pred_dot_gt / pred_dot_pred + pred = pred * scale_factor[:, None, None] + else: + raise ValueError(f'Invalid value for alignment: {alignment}') + error = np.linalg.norm(pred - gt, ord=2, axis=-1)[mask].mean() + + return error diff --git a/mmpose/evaluation/functional/mesh_eval.py b/mmpose/evaluation/functional/mesh_eval.py new file mode 100644 index 0000000000..683b4539b2 --- /dev/null +++ b/mmpose/evaluation/functional/mesh_eval.py @@ -0,0 +1,66 @@ +# ------------------------------------------------------------------------------ +# Adapted from https://github.com/akanazawa/hmr +# Original licence: Copyright (c) 2018 akanazawa, under the MIT License. +# ------------------------------------------------------------------------------ + +import numpy as np + + +def compute_similarity_transform(source_points, target_points): + """Computes a similarity transform (sR, t) that takes a set of 3D points + source_points (N x 3) closest to a set of 3D points target_points, where R + is an 3x3 rotation matrix, t 3x1 translation, s scale. And return the + transformed 3D points source_points_hat (N x 3). i.e. solves the orthogonal + Procrutes problem. + + Note: + Points number: N + + Args: + source_points (np.ndarray): Source point set with shape [N, 3]. + target_points (np.ndarray): Target point set with shape [N, 3]. + + Returns: + np.ndarray: Transformed source point set with shape [N, 3]. + """ + + assert target_points.shape[0] == source_points.shape[0] + assert target_points.shape[1] == 3 and source_points.shape[1] == 3 + + source_points = source_points.T + target_points = target_points.T + + # 1. Remove mean. 
+ mu1 = source_points.mean(axis=1, keepdims=True) + mu2 = target_points.mean(axis=1, keepdims=True) + X1 = source_points - mu1 + X2 = target_points - mu2 + + # 2. Compute variance of X1 used for scale. + var1 = np.sum(X1**2) + + # 3. The outer product of X1 and X2. + K = X1.dot(X2.T) + + # 4. Solution that Maximizes trace(R'K) is R=U*V', where U, V are + # singular vectors of K. + U, _, Vh = np.linalg.svd(K) + V = Vh.T + # Construct Z that fixes the orientation of R to get det(R)=1. + Z = np.eye(U.shape[0]) + Z[-1, -1] *= np.sign(np.linalg.det(U.dot(V.T))) + # Construct R. + R = V.dot(Z.dot(U.T)) + + # 5. Recover scale. + scale = np.trace(R.dot(K)) / var1 + + # 6. Recover translation. + t = mu2 - scale * (R.dot(mu1)) + + # 7. Transform the source points: + source_points_hat = scale * R.dot(source_points) + t + + source_points_hat = source_points_hat.T + + return source_points_hat diff --git a/mmpose/evaluation/metrics/__init__.py b/mmpose/evaluation/metrics/__init__.py index f02c353ef7..ac7e21b5cc 100644 --- a/mmpose/evaluation/metrics/__init__.py +++ b/mmpose/evaluation/metrics/__init__.py @@ -3,11 +3,12 @@ from .coco_wholebody_metric import CocoWholeBodyMetric from .keypoint_2d_metrics import (AUC, EPE, NME, JhmdbPCKAccuracy, MpiiPCKAccuracy, PCKAccuracy) +from .keypoint_3d_metrics import MPJPE from .keypoint_partition_metric import KeypointPartitionMetric from .posetrack18_metric import PoseTrack18Metric __all__ = [ 'CocoMetric', 'PCKAccuracy', 'MpiiPCKAccuracy', 'JhmdbPCKAccuracy', 'AUC', 'EPE', 'NME', 'PoseTrack18Metric', 'CocoWholeBodyMetric', - 'KeypointPartitionMetric' + 'KeypointPartitionMetric', 'MPJPE' ] diff --git a/mmpose/evaluation/metrics/keypoint_3d_metrics.py b/mmpose/evaluation/metrics/keypoint_3d_metrics.py new file mode 100644 index 0000000000..e945650c30 --- /dev/null +++ b/mmpose/evaluation/metrics/keypoint_3d_metrics.py @@ -0,0 +1,131 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from collections import defaultdict +from os import path as osp +from typing import Dict, Optional, Sequence + +import numpy as np +from mmengine.evaluator import BaseMetric +from mmengine.logging import MMLogger + +from mmpose.registry import METRICS +from ..functional import keypoint_mpjpe + + +@METRICS.register_module() +class MPJPE(BaseMetric): + """MPJPE evaluation metric. + + Calculate the mean per-joint position error (MPJPE) of keypoints. + + Note: + - length of dataset: N + - num_keypoints: K + - number of keypoint dimensions: D (typically D = 2) + + Args: + mode (str): Method to align the prediction with the + ground truth. Supported options are: + + - ``'mpjpe'``: no alignment will be applied + - ``'p-mpjpe'``: align in the least-square sense in scale + - ``'n-mpjpe'``: align in the least-square sense in + scale, rotation, and translation. + + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be ``'cpu'`` or + ``'gpu'``. Default: ``'cpu'``. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, ``self.default_prefix`` + will be used instead. Default: ``None``. + """ + + ALIGNMENT = {'mpjpe': 'none', 'p-mpjpe': 'procrustes', 'n-mpjpe': 'scale'} + + def __init__(self, + mode: str = 'mpjpe', + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device=collect_device, prefix=prefix) + allowed_modes = self.ALIGNMENT.keys() + if mode not in allowed_modes: + raise KeyError("`mode` should be 'mpjpe', 'p-mpjpe', or " + f"'n-mpjpe', but got '{mode}'.") + + self.mode = mode + + def process(self, data_batch: Sequence[dict], + data_samples: Sequence[dict]) -> None: + """Process one batch of data samples and predictions. 
The processed + results should be stored in ``self.results``, which will be used to + compute the metrics when all batches have been processed. + + Args: + data_batch (Sequence[dict]): A batch of data + from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from + the model. + """ + for data_sample in data_samples: + # predicted keypoints coordinates, [1, K, D] + pred_coords = data_sample['pred_instances']['keypoints'] + # ground truth data_info + gt = data_sample['gt_instances'] + # ground truth keypoints coordinates, [1, K, D] + gt_coords = gt['lifting_target'] + # ground truth keypoints_visible, [1, K, 1] + mask = gt['lifting_target_visible'].astype(bool).reshape(1, -1) + # instance action + img_path = data_sample['target_img_path'] + _, rest = osp.basename(img_path).split('_', 1) + action, _ = rest.split('.', 1) + + result = { + 'pred_coords': pred_coords, + 'gt_coords': gt_coords, + 'mask': mask, + 'action': action + } + + self.results.append(result) + + def compute_metrics(self, results: list) -> Dict[str, float]: + """Compute the metrics from processed results. + + Args: + results (list): The processed results of each batch. + + Returns: + Dict[str, float]: The computed metrics. The keys are the names of + the metrics, and the values are the corresponding results. 
+ """ + logger: MMLogger = MMLogger.get_current_instance() + + # pred_coords: [N, K, D] + pred_coords = np.concatenate( + [result['pred_coords'] for result in results]) + if pred_coords.ndim == 4 and pred_coords.shape[1] == 1: + pred_coords = np.squeeze(pred_coords, axis=1) + # gt_coords: [N, K, D] + gt_coords = np.stack([result['gt_coords'] for result in results]) + # mask: [N, K] + mask = np.concatenate([result['mask'] for result in results]) + # action_category_indices: Dict[List[int]] + action_category_indices = defaultdict(list) + for idx, result in enumerate(results): + action_category = result['action'].split('_')[0] + action_category_indices[action_category].append(idx) + + error_name = self.mode.upper() + + logger.info(f'Evaluating {self.mode.upper()}...') + metrics = dict() + + metrics[error_name] = keypoint_mpjpe(pred_coords, gt_coords, mask, + self.ALIGNMENT[self.mode]) + + for action_category, indices in action_category_indices.items(): + metrics[f'{error_name}_{action_category}'] = keypoint_mpjpe( + pred_coords[indices], gt_coords[indices], mask[indices]) + + return metrics diff --git a/mmpose/models/heads/__init__.py b/mmpose/models/heads/__init__.py index 8b4d988a5f..75a626569b 100644 --- a/mmpose/models/heads/__init__.py +++ b/mmpose/models/heads/__init__.py @@ -5,10 +5,12 @@ HeatmapHead, MSPNHead, ViPNASHead) from .hybrid_heads import DEKRHead from .regression_heads import (DSNTHead, IntegralRegressionHead, - RegressionHead, RLEHead) + RegressionHead, RLEHead, TemporalRegressionHead, + TrajectoryRegressionHead) __all__ = [ 'BaseHead', 'HeatmapHead', 'CPMHead', 'MSPNHead', 'ViPNASHead', 'RegressionHead', 'IntegralRegressionHead', 'SimCCHead', 'RLEHead', - 'DSNTHead', 'AssociativeEmbeddingHead', 'DEKRHead', 'CIDHead', 'RTMCCHead' + 'DSNTHead', 'AssociativeEmbeddingHead', 'DEKRHead', 'CIDHead', 'RTMCCHead', + 'TemporalRegressionHead', 'TrajectoryRegressionHead' ] diff --git a/mmpose/models/heads/regression_heads/__init__.py 
b/mmpose/models/heads/regression_heads/__init__.py index f2a5027b1b..ce9cd5e1b0 100644 --- a/mmpose/models/heads/regression_heads/__init__.py +++ b/mmpose/models/heads/regression_heads/__init__.py @@ -3,10 +3,14 @@ from .integral_regression_head import IntegralRegressionHead from .regression_head import RegressionHead from .rle_head import RLEHead +from .temporal_regression_head import TemporalRegressionHead +from .trajectory_regression_head import TrajectoryRegressionHead __all__ = [ 'RegressionHead', 'IntegralRegressionHead', 'DSNTHead', 'RLEHead', + 'TemporalRegressionHead', + 'TrajectoryRegressionHead', ] diff --git a/mmpose/models/heads/regression_heads/temporal_regression_head.py b/mmpose/models/heads/regression_heads/temporal_regression_head.py new file mode 100644 index 0000000000..ac76316842 --- /dev/null +++ b/mmpose/models/heads/regression_heads/temporal_regression_head.py @@ -0,0 +1,151 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Sequence, Tuple, Union + +import numpy as np +import torch +from torch import Tensor, nn + +from mmpose.evaluation.functional import keypoint_pck_accuracy +from mmpose.registry import KEYPOINT_CODECS, MODELS +from mmpose.utils.tensor_utils import to_numpy +from mmpose.utils.typing import (ConfigType, OptConfigType, OptSampleList, + Predictions) +from ..base_head import BaseHead + +OptIntSeq = Optional[Sequence[int]] + + +@MODELS.register_module() +class TemporalRegressionHead(BaseHead): + """Temporal Regression head of `VideoPose3D`_ by Dario et al (CVPR'2019). + + Args: + in_channels (int | sequence[int]): Number of input channels + num_joints (int): Number of joints + loss (Config): Config for keypoint loss. Defaults to use + :class:`SmoothL1Loss` + decoder (Config, optional): The decoder config that controls decoding + keypoint coordinates from the network output. Defaults to ``None`` + init_cfg (Config, optional): Config to control the initialization. 
See + :attr:`default_init_cfg` for default settings + + .. _`VideoPose3D`: https://arxiv.org/abs/1811.11742 + """ + + _version = 2 + + def __init__(self, + in_channels: Union[int, Sequence[int]], + num_joints: int, + loss: ConfigType = dict( + type='MSELoss', use_target_weight=True), + decoder: OptConfigType = None, + init_cfg: OptConfigType = None): + + if init_cfg is None: + init_cfg = self.default_init_cfg + + super().__init__(init_cfg) + + self.in_channels = in_channels + self.num_joints = num_joints + self.loss_module = MODELS.build(loss) + if decoder is not None: + self.decoder = KEYPOINT_CODECS.build(decoder) + else: + self.decoder = None + + # Define fully-connected layers + self.conv = nn.Conv1d(in_channels, self.num_joints * 3, 1) + + def forward(self, feats: Tuple[Tensor]) -> Tensor: + """Forward the network. The input is multi scale feature maps and the + output is the coordinates. + + Args: + feats (Tuple[Tensor]): Multi scale feature maps. + + Returns: + Tensor: Output coordinates (and sigmas[optional]). + """ + x = feats[-1] + + x = self.conv(x) + + return x.reshape(-1, self.num_joints, 3) + + def predict(self, + feats: Tuple[Tensor], + batch_data_samples: OptSampleList, + test_cfg: ConfigType = {}) -> Predictions: + """Predict results from outputs. + + Returns: + preds (sequence[InstanceData]): Prediction results. + Each contains the following fields: + + - keypoints: Predicted keypoints of shape (B, N, K, D). + - keypoint_scores: Scores of predicted keypoints of shape + (B, N, K). 
+ """ + + batch_coords = self.forward(feats) # (B, K, D) + + # Restore global position with target_root + target_root = batch_data_samples[0].metainfo.get('target_root', None) + if target_root is not None: + target_root = torch.stack([ + torch.from_numpy(b.metainfo['target_root']) + for b in batch_data_samples + ]) + else: + target_root = torch.stack([ + torch.empty((0), dtype=torch.float32) + for _ in batch_data_samples[0].metainfo + ]) + + preds = self.decode((batch_coords, target_root)) + + return preds + + def loss(self, + inputs: Tuple[Tensor], + batch_data_samples: OptSampleList, + train_cfg: ConfigType = {}) -> dict: + """Calculate losses from a batch of inputs and data samples.""" + + pred_outputs = self.forward(inputs) + + lifting_target_label = torch.cat([ + d.gt_instance_labels.lifting_target_label + for d in batch_data_samples + ]) + lifting_target_weights = torch.cat([ + d.gt_instance_labels.lifting_target_weights + for d in batch_data_samples + ]) + + # calculate losses + losses = dict() + loss = self.loss_module(pred_outputs, lifting_target_label, + lifting_target_weights.unsqueeze(-1)) + + losses.update(loss_pose3d=loss) + + # calculate accuracy + _, avg_acc, _ = keypoint_pck_accuracy( + pred=to_numpy(pred_outputs), + gt=to_numpy(lifting_target_label), + mask=to_numpy(lifting_target_weights) > 0, + thr=0.05, + norm_factor=np.ones((pred_outputs.size(0), 3), dtype=np.float32)) + + mpjpe_pose = torch.tensor(avg_acc, device=lifting_target_label.device) + losses.update(mpjpe=mpjpe_pose) + + return losses + + @property + def default_init_cfg(self): + init_cfg = [dict(type='Normal', layer=['Linear'], std=0.01, bias=0)] + return init_cfg diff --git a/mmpose/models/heads/regression_heads/trajectory_regression_head.py b/mmpose/models/heads/regression_heads/trajectory_regression_head.py new file mode 100644 index 0000000000..adfd7353d3 --- /dev/null +++ b/mmpose/models/heads/regression_heads/trajectory_regression_head.py @@ -0,0 +1,150 @@ +# Copyright (c) 
OpenMMLab. All rights reserved. +from typing import Optional, Sequence, Tuple, Union + +import numpy as np +import torch +from torch import Tensor, nn + +from mmpose.evaluation.functional import keypoint_pck_accuracy +from mmpose.registry import KEYPOINT_CODECS, MODELS +from mmpose.utils.tensor_utils import to_numpy +from mmpose.utils.typing import (ConfigType, OptConfigType, OptSampleList, + Predictions) +from ..base_head import BaseHead + +OptIntSeq = Optional[Sequence[int]] + + +@MODELS.register_module() +class TrajectoryRegressionHead(BaseHead): + """Trajectory Regression head of `VideoPose3D`_ by Dario et al (CVPR'2019). + + Args: + in_channels (int | sequence[int]): Number of input channels + num_joints (int): Number of joints + loss (Config): Config for trajectory loss. Defaults to use + :class:`MPJPELoss` + decoder (Config, optional): The decoder config that controls decoding + keypoint coordinates from the network output. Defaults to ``None`` + init_cfg (Config, optional): Config to control the initialization. See + :attr:`default_init_cfg` for default settings + + .. _`VideoPose3D`: https://arxiv.org/abs/1811.11742 + """ + + _version = 2 + + def __init__(self, + in_channels: Union[int, Sequence[int]], + num_joints: int, + loss: ConfigType = dict( + type='MPJPELoss', use_target_weight=True), + decoder: OptConfigType = None, + init_cfg: OptConfigType = None): + + if init_cfg is None: + init_cfg = self.default_init_cfg + + super().__init__(init_cfg) + + self.in_channels = in_channels + self.num_joints = num_joints + self.loss_module = MODELS.build(loss) + if decoder is not None: + self.decoder = KEYPOINT_CODECS.build(decoder) + else: + self.decoder = None + + # Define fully-connected layers + self.conv = nn.Conv1d(in_channels, self.num_joints * 3, 1) + + def forward(self, feats: Tuple[Tensor]) -> Tensor: + """Forward the network. The input is multi scale feature maps and the + output is the coordinates. 
+ + Args: + feats (Tuple[Tensor]): Multi scale feature maps. + + Returns: + Tensor: output coordinates(and sigmas[optional]). + """ + x = feats[-1] + + x = self.conv(x) + + return x.reshape(-1, self.num_joints, 3) + + def predict(self, + feats: Tuple[Tensor], + batch_data_samples: OptSampleList, + test_cfg: ConfigType = {}) -> Predictions: + """Predict results from outputs. + + Returns: + preds (sequence[InstanceData]): Prediction results. + Each contains the following fields: + + - keypoints: Predicted keypoints of shape (B, N, K, D). + - keypoint_scores: Scores of predicted keypoints of shape + (B, N, K). + """ + + batch_coords = self.forward(feats) # (B, K, D) + + # Restore global position with target_root + target_root = batch_data_samples[0].metainfo.get('target_root', None) + if target_root is not None: + target_root = torch.stack([ + torch.from_numpy(b.metainfo['target_root']) + for b in batch_data_samples + ]) + else: + target_root = torch.stack([ + torch.empty((0), dtype=torch.float32) + for _ in batch_data_samples[0].metainfo + ]) + + preds = self.decode((batch_coords, target_root)) + + return preds + + def loss(self, + inputs: Union[Tensor, Tuple[Tensor]], + batch_data_samples: OptSampleList, + train_cfg: ConfigType = {}) -> dict: + """Calculate losses from a batch of inputs and data samples.""" + + pred_outputs = self.forward(inputs) + + lifting_target_label = torch.cat([ + d.gt_instance_labels.lifting_target_label + for d in batch_data_samples + ]) + trajectory_weights = torch.cat([ + d.gt_instance_labels.trajectory_weights for d in batch_data_samples + ]) + + # calculate losses + losses = dict() + loss = self.loss_module(pred_outputs, lifting_target_label, + trajectory_weights.unsqueeze(-1)) + + losses.update(loss_traj=loss) + + # calculate accuracy + _, avg_acc, _ = keypoint_pck_accuracy( + pred=to_numpy(pred_outputs), + gt=to_numpy(lifting_target_label), + mask=to_numpy(trajectory_weights) > 0, + thr=0.05, + 
norm_factor=np.ones((pred_outputs.size(0), 3), dtype=np.float32)) + + mpjpe_traj = torch.tensor(avg_acc, device=lifting_target_label.device) + losses.update(mpjpe_traj=mpjpe_traj) + + return losses + + @property + def default_init_cfg(self): + init_cfg = [dict(type='Normal', layer=['Linear'], std=0.01, bias=0)] + return init_cfg diff --git a/mmpose/models/pose_estimators/__init__.py b/mmpose/models/pose_estimators/__init__.py index 6ead1a979e..c5287e0c2c 100644 --- a/mmpose/models/pose_estimators/__init__.py +++ b/mmpose/models/pose_estimators/__init__.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. from .bottomup import BottomupPoseEstimator +from .pose_lifter import PoseLifter from .topdown import TopdownPoseEstimator -__all__ = ['TopdownPoseEstimator', 'BottomupPoseEstimator'] +__all__ = ['TopdownPoseEstimator', 'BottomupPoseEstimator', 'PoseLifter'] diff --git a/mmpose/models/pose_estimators/base.py b/mmpose/models/pose_estimators/base.py index 73d60de93a..0ae921d0ec 100644 --- a/mmpose/models/pose_estimators/base.py +++ b/mmpose/models/pose_estimators/base.py @@ -130,6 +130,8 @@ def forward(self, - If ``mode='loss'``, return a dict of tensor(s) which is the loss function value """ + if isinstance(inputs, list): + inputs = torch.stack(inputs) if mode == 'loss': return self.loss(inputs, data_samples) elif mode == 'predict': diff --git a/mmpose/models/pose_estimators/pose_lifter.py b/mmpose/models/pose_estimators/pose_lifter.py new file mode 100644 index 0000000000..5bad3dde3c --- /dev/null +++ b/mmpose/models/pose_estimators/pose_lifter.py @@ -0,0 +1,340 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from itertools import zip_longest +from typing import Tuple, Union + +from torch import Tensor + +from mmpose.models.utils import check_and_update_config +from mmpose.registry import MODELS +from mmpose.utils.typing import (ConfigType, InstanceList, OptConfigType, + Optional, OptMultiConfig, OptSampleList, + PixelDataList, SampleList) +from .base import BasePoseEstimator + + +@MODELS.register_module() +class PoseLifter(BasePoseEstimator): + """Base class for pose lifter. + + Args: + backbone (dict): The backbone config + neck (dict, optional): The neck config. Defaults to ``None`` + head (dict, optional): The head config. Defaults to ``None`` + traj_backbone (dict, optional): The backbone config for trajectory + model. Defaults to ``None`` + traj_neck (dict, optional): The neck config for trajectory model. + Defaults to ``None`` + traj_head (dict, optional): The head config for trajectory model. + Defaults to ``None`` + semi_loss (dict, optional): The semi-supervised loss config. + Defaults to ``None`` + train_cfg (dict, optional): The runtime config for training process. + Defaults to ``None`` + test_cfg (dict, optional): The runtime config for testing process. + Defaults to ``None`` + data_preprocessor (dict, optional): The data preprocessing config to + build the instance of :class:`BaseDataPreprocessor`. Defaults to + ``None`` + init_cfg (dict, optional): The config to control the initialization. + Defaults to ``None`` + metainfo (dict): Meta information for dataset, such as keypoints + definition and properties. If set, the metainfo of the input data + batch will be overridden. For more details, please refer to + https://mmpose.readthedocs.io/en/latest/user_guides/ + prepare_datasets.html#create-a-custom-dataset-info- + config-file-for-the-dataset. 
Defaults to ``None`` + """ + + def __init__(self, + backbone: ConfigType, + neck: OptConfigType = None, + head: OptConfigType = None, + traj_backbone: OptConfigType = None, + traj_neck: OptConfigType = None, + traj_head: OptConfigType = None, + semi_loss: OptConfigType = None, + train_cfg: OptConfigType = None, + test_cfg: OptConfigType = None, + data_preprocessor: OptConfigType = None, + init_cfg: OptMultiConfig = None, + metainfo: Optional[dict] = None): + super().__init__( + backbone=backbone, + neck=neck, + head=head, + train_cfg=train_cfg, + test_cfg=test_cfg, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg, + metainfo=metainfo) + + # trajectory model + self.share_backbone = False + if traj_head is not None: + if traj_backbone is not None: + self.traj_backbone = MODELS.build(traj_backbone) + else: + self.share_backbone = True + + # the PR #2108 and #2126 modified the interface of neck and head. + # The following function automatically detects outdated + # configurations and updates them accordingly, while also providing + # clear and concise information on the changes made. 
+ traj_neck, traj_head = check_and_update_config( + traj_neck, traj_head) + + if traj_neck is not None: + self.traj_neck = MODELS.build(traj_neck) + + self.traj_head = MODELS.build(traj_head) + + # semi-supervised loss + self.semi_supervised = semi_loss is not None + if self.semi_supervised: + assert any([head, traj_head]) + self.semi_loss = MODELS.build(semi_loss) + + @property + def with_traj_backbone(self): + """bool: Whether the pose lifter has trajectory backbone.""" + return hasattr(self, 'traj_backbone') and \ + self.traj_backbone is not None + + @property + def with_traj_neck(self): + """bool: Whether the pose lifter has trajectory neck.""" + return hasattr(self, 'traj_neck') and self.traj_neck is not None + + @property + def with_traj(self): + """bool: Whether the pose lifter has trajectory head.""" + return hasattr(self, 'traj_head') + + @property + def causal(self): + """bool: Whether the pose lifter is causal.""" + if hasattr(self.backbone, 'causal'): + return self.backbone.causal + else: + raise AttributeError('A PoseLifter\'s backbone should have ' + 'the bool attribute "causal" to indicate if' + 'it performs causal inference.') + + def extract_feat(self, inputs: Tensor) -> Tuple[Tensor]: + """Extract features. + + Args: + inputs (Tensor): Image tensor with shape (N, K, C, T). + + Returns: + tuple[Tensor]: Multi-level features that may have various + resolutions. + """ + # supervised learning + # pose model + feats = self.backbone(inputs) + if self.with_neck: + feats = self.neck(feats) + + # trajectory model + if self.with_traj: + if self.share_backbone: + traj_x = feats + else: + traj_x = self.traj_backbone(inputs) + + if self.with_traj_neck: + traj_x = self.traj_neck(traj_x) + return feats, traj_x + else: + return feats + + def _forward(self, + inputs: Tensor, + data_samples: OptSampleList = None + ) -> Union[Tensor, Tuple[Tensor]]: + """Network forward process. Usually includes backbone, neck and head + forward without any post-processing. 
+ + Args: + inputs (Tensor): Inputs with shape (N, K, C, T). + + Returns: + Union[Tensor | Tuple[Tensor]]: forward output of the network. + """ + feats = self.extract_feat(inputs) + + if self.with_traj: + # forward with trajectory model + x, traj_x = feats + if self.with_head: + x = self.head.forward(x) + + traj_x = self.traj_head.forward(traj_x) + return x, traj_x + else: + # forward without trajectory model + x = feats + if self.with_head: + x = self.head.forward(x) + return x + + def loss(self, inputs: Tensor, data_samples: SampleList) -> dict: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (Tensor): Inputs with shape (N, K, C, T). + data_samples (List[:obj:`PoseDataSample`]): The batch + data samples. + + Returns: + dict: A dictionary of losses. + """ + feats = self.extract_feat(inputs) + + losses = {} + + if self.with_traj: + x, traj_x = feats + # loss of trajectory model + losses.update( + self.traj_head.loss( + traj_x, data_samples, train_cfg=self.train_cfg)) + else: + x = feats + + if self.with_head: + # loss of pose model + losses.update( + self.head.loss(x, data_samples, train_cfg=self.train_cfg)) + + # TODO: support semi-supervised learning + if self.semi_supervised: + losses.update(semi_loss=self.semi_loss(inputs, data_samples)) + + return losses + + def predict(self, inputs: Tensor, data_samples: SampleList) -> SampleList: + """Predict results from a batch of inputs and data samples with post- + processing. + + Note: + - batch_size: B + - num_input_keypoints: K + - input_keypoint_dim: C + - input_sequence_len: T + + Args: + inputs (Tensor): Inputs with shape like (B, K, C, T). + data_samples (List[:obj:`PoseDataSample`]): The batch + data samples + + Returns: + list[:obj:`PoseDataSample`]: The pose estimation results of the + input images. 
The return value is `PoseDataSample` instances with + ``pred_instances`` and ``pred_fields``(optional) field , and + ``pred_instances`` usually contains the following keys: + + - keypoints (Tensor): predicted keypoint coordinates in shape + (num_instances, K, D) where K is the keypoint number and D + is the keypoint dimension + - keypoint_scores (Tensor): predicted keypoint scores in shape + (num_instances, K) + """ + assert self.with_head, ( + 'The model must have head to perform prediction.') + + feats = self.extract_feat(inputs) + + pose_preds, batch_pred_instances, batch_pred_fields = None, None, None + traj_preds, batch_traj_instances, batch_traj_fields = None, None, None + if self.with_traj: + x, traj_x = feats + traj_preds = self.traj_head.predict( + traj_x, data_samples, test_cfg=self.test_cfg) + else: + x = feats + + if self.with_head: + pose_preds = self.head.predict( + x, data_samples, test_cfg=self.test_cfg) + + if isinstance(pose_preds, tuple): + batch_pred_instances, batch_pred_fields = pose_preds + else: + batch_pred_instances = pose_preds + + if isinstance(traj_preds, tuple): + batch_traj_instances, batch_traj_fields = traj_preds + else: + batch_traj_instances = traj_preds + + results = self.add_pred_to_datasample(batch_pred_instances, + batch_pred_fields, + batch_traj_instances, + batch_traj_fields, data_samples) + + return results + + def add_pred_to_datasample( + self, + batch_pred_instances: InstanceList, + batch_pred_fields: Optional[PixelDataList], + batch_traj_instances: InstanceList, + batch_traj_fields: Optional[PixelDataList], + batch_data_samples: SampleList, + ) -> SampleList: + """Add predictions into data samples. + + Args: + batch_pred_instances (List[InstanceData]): The predicted instances + of the input data batch + batch_pred_fields (List[PixelData], optional): The predicted + fields (e.g. 
heatmaps) of the input batch + batch_traj_instances (List[InstanceData]): The predicted instances + of the input data batch + batch_traj_fields (List[PixelData], optional): The predicted + fields (e.g. heatmaps) of the input batch + batch_data_samples (List[PoseDataSample]): The input data batch + + Returns: + List[PoseDataSample]: A list of data samples where the predictions + are stored in the ``pred_instances`` field of each data sample. + """ + assert len(batch_pred_instances) == len(batch_data_samples) + if batch_pred_fields is None: + batch_pred_fields, batch_traj_fields = [], [] + if batch_traj_instances is None: + batch_traj_instances = [] + output_keypoint_indices = self.test_cfg.get('output_keypoint_indices', + None) + + for (pred_instances, pred_fields, traj_instances, traj_fields, + data_sample) in zip_longest(batch_pred_instances, + batch_pred_fields, + batch_traj_instances, + batch_traj_fields, + batch_data_samples): + + if output_keypoint_indices is not None: + # select output keypoints with given indices + num_keypoints = pred_instances.keypoints.shape[1] + for key, value in pred_instances.all_items(): + if key.startswith('keypoint'): + pred_instances.set_field( + value[:, output_keypoint_indices], key) + + data_sample.pred_instances = pred_instances + + if pred_fields is not None: + if output_keypoint_indices is not None: + # select output heatmap channels with keypoint indices + # when the number of heatmap channel matches num_keypoints + for key, value in pred_fields.all_items(): + if value.shape[0] != num_keypoints: + continue + pred_fields.set_field(value[output_keypoint_indices], + key) + data_sample.pred_fields = pred_fields + + return batch_data_samples diff --git a/mmpose/structures/keypoint/__init__.py b/mmpose/structures/keypoint/__init__.py index b8d5a24c7a..12ee96cf7c 100644 --- a/mmpose/structures/keypoint/__init__.py +++ b/mmpose/structures/keypoint/__init__.py @@ -1,5 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. 
-from .transforms import flip_keypoints +from .transforms import flip_keypoints, flip_keypoints_custom_center -__all__ = ['flip_keypoints'] +__all__ = ['flip_keypoints', 'flip_keypoints_custom_center'] diff --git a/mmpose/structures/keypoint/transforms.py b/mmpose/structures/keypoint/transforms.py index 99adaa1306..b50da4f8fe 100644 --- a/mmpose/structures/keypoint/transforms.py +++ b/mmpose/structures/keypoint/transforms.py @@ -62,3 +62,60 @@ def flip_keypoints(keypoints: np.ndarray, keypoints = [w, h] - keypoints - 1 return keypoints, keypoints_visible + + +def flip_keypoints_custom_center(keypoints: np.ndarray, + keypoints_visible: np.ndarray, + flip_indices: List[int], + center_mode: str = 'static', + center_x: float = 0.5, + center_index: int = 0): + """Flip human joints horizontally. + + Note: + - num_keypoint: K + - dimension: D + + Args: + keypoints (np.ndarray([..., K, D])): Coordinates of keypoints. + keypoints_visible (np.ndarray([..., K])): Visibility item of keypoints. + flip_indices (list[int]): The indices to flip the keypoints. + center_mode (str): The mode to set the center location on the x-axis + to flip around. Options are: + + - static: use a static x value (see center_x also) + - root: use a root joint (see center_index also) + + Defaults: ``'static'``. + center_x (float): Set the x-axis location of the flip center. Only used + when ``center_mode`` is ``'static'``. Defaults: 0.5. + center_index (int): Set the index of the root joint, whose x location + will be used as the flip center. Only used when ``center_mode`` is + ``'root'``. Defaults: 0. + + Returns: + np.ndarray([..., K, C]): Flipped joints. 
+ """ + + assert keypoints.ndim >= 2, f'Invalid pose shape {keypoints.shape}' + + allowed_center_mode = {'static', 'root'} + assert center_mode in allowed_center_mode, 'Get invalid center_mode ' \ + f'{center_mode}, allowed choices are {allowed_center_mode}' + + if center_mode == 'static': + x_c = center_x + elif center_mode == 'root': + assert keypoints.shape[-2] > center_index + x_c = keypoints[..., center_index, 0] + + keypoints_flipped = keypoints.copy() + keypoints_visible_flipped = keypoints_visible.copy() + # Swap left-right parts + for left, right in enumerate(flip_indices): + keypoints_flipped[..., left, :] = keypoints[..., right, :] + keypoints_visible_flipped[..., left] = keypoints_visible[..., right] + + # Flip horizontally + keypoints_flipped[..., 0] = x_c * 2 - keypoints_flipped[..., 0] + return keypoints_flipped, keypoints_visible_flipped diff --git a/mmpose/visualization/__init__.py b/mmpose/visualization/__init__.py index 357d40a707..4a18e8bc5b 100644 --- a/mmpose/visualization/__init__.py +++ b/mmpose/visualization/__init__.py @@ -1,4 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. +from .fast_visualizer import FastVisualizer from .local_visualizer import PoseLocalVisualizer +from .local_visualizer_3d import Pose3dLocalVisualizer -__all__ = ['PoseLocalVisualizer'] +__all__ = ['PoseLocalVisualizer', 'FastVisualizer', 'Pose3dLocalVisualizer'] diff --git a/mmpose/visualization/fast_visualizer.py b/mmpose/visualization/fast_visualizer.py new file mode 100644 index 0000000000..fa0cb38527 --- /dev/null +++ b/mmpose/visualization/fast_visualizer.py @@ -0,0 +1,78 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import cv2 + + +class FastVisualizer: + """MMPose Fast Visualizer. + + A simple yet fast visualizer for video/webcam inference. + + Args: + metainfo (dict): pose meta information + radius (int, optional)): Keypoint radius for visualization. + Defaults to 6. + line_width (int, optional): Link width for visualization. + Defaults to 3. 
+ kpt_thr (float, optional): Threshold for keypoints' confidence score, + keypoints with score below this value will not be drawn. + Defaults to 0.3. + """ + + def __init__(self, metainfo, radius=6, line_width=3, kpt_thr=0.3): + self.radius = radius + self.line_width = line_width + self.kpt_thr = kpt_thr + + self.keypoint_id2name = metainfo['keypoint_id2name'] + self.keypoint_name2id = metainfo['keypoint_name2id'] + self.keypoint_colors = metainfo['keypoint_colors'] + self.skeleton_links = metainfo['skeleton_links'] + self.skeleton_link_colors = metainfo['skeleton_link_colors'] + + def draw_pose(self, img, instances): + """Draw pose estimations on the given image. + + This method draws keypoints and skeleton links on the input image + using the provided instances. + + Args: + img (numpy.ndarray): The input image on which to + draw the pose estimations. + instances (object): An object containing detected instances' + information, including keypoints and keypoint_scores. + + Returns: + None: The input image will be modified in place. 
+ """ + + if instances is None: + print('no instance detected') + return + + keypoints = instances.keypoints + scores = instances.keypoint_scores + + for kpts, score in zip(keypoints, scores): + for sk_id, sk in enumerate(self.skeleton_links): + if score[sk[0]] < self.kpt_thr or score[sk[1]] < self.kpt_thr: + # skip the link that should not be drawn + continue + + pos1 = (int(kpts[sk[0], 0]), int(kpts[sk[0], 1])) + pos2 = (int(kpts[sk[1], 0]), int(kpts[sk[1], 1])) + + color = self.skeleton_link_colors[sk_id].tolist() + cv2.line(img, pos1, pos2, color, thickness=self.line_width) + + for kid, kpt in enumerate(kpts): + if score[kid] < self.kpt_thr: + # skip the point that should not be drawn + continue + + x_coord, y_coord = int(kpt[0]), int(kpt[1]) + + color = self.keypoint_colors[kid].tolist() + cv2.circle(img, (int(x_coord), int(y_coord)), self.radius, + color, -1) + cv2.circle(img, (int(x_coord), int(y_coord)), self.radius, + (255, 255, 255)) diff --git a/mmpose/visualization/local_visualizer_3d.py b/mmpose/visualization/local_visualizer_3d.py new file mode 100644 index 0000000000..764a85dee2 --- /dev/null +++ b/mmpose/visualization/local_visualizer_3d.py @@ -0,0 +1,563 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Dict, List, Optional, Tuple, Union + +import cv2 +import mmcv +import numpy as np +from matplotlib import pyplot as plt +from mmengine.dist import master_only +from mmengine.structures import InstanceData + +from mmpose.registry import VISUALIZERS +from mmpose.structures import PoseDataSample +from . import PoseLocalVisualizer + + +@VISUALIZERS.register_module() +class Pose3dLocalVisualizer(PoseLocalVisualizer): + """MMPose 3d Local Visualizer. + + Args: + name (str): Name of the instance. Defaults to 'visualizer'. + image (np.ndarray, optional): the origin image to draw. The format + should be RGB. Defaults to ``None`` + vis_backends (list, optional): Visual backend config list. 
Defaults to + ``None`` + save_dir (str, optional): Save file dir for all storage backends. + If it is ``None``, the backend storage will not save any data. + Defaults to ``None`` + bbox_color (str, tuple(int), optional): Color of bbox lines. + The tuple of color should be in BGR order. Defaults to ``'green'`` + kpt_color (str, tuple(tuple(int)), optional): Color of keypoints. + The tuple of color should be in BGR order. Defaults to ``'red'`` + link_color (str, tuple(tuple(int)), optional): Color of skeleton. + The tuple of color should be in BGR order. Defaults to ``None`` + line_width (int, float): The width of lines. Defaults to 1 + radius (int, float): The radius of keypoints. Defaults to 4 + show_keypoint_weight (bool): Whether to adjust the transparency + of keypoints according to their score. Defaults to ``False`` + alpha (int, float): The transparency of bboxes. Defaults to ``0.8`` + """ + + def __init__(self, + name: str = 'visualizer', + image: Optional[np.ndarray] = None, + vis_backends: Optional[Dict] = None, + save_dir: Optional[str] = None, + bbox_color: Optional[Union[str, Tuple[int]]] = 'green', + kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = 'red', + link_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, + text_color: Optional[Union[str, + Tuple[int]]] = (255, 255, 255), + skeleton: Optional[Union[List, Tuple]] = None, + line_width: Union[int, float] = 1, + radius: Union[int, float] = 3, + show_keypoint_weight: bool = False, + backend: str = 'opencv', + alpha: float = 0.8): + super().__init__(name, image, vis_backends, save_dir, bbox_color, + kpt_color, link_color, text_color, skeleton, + line_width, radius, show_keypoint_weight, backend, + alpha) + + def _draw_3d_data_samples( + self, + image: np.ndarray, + pose_samples: PoseDataSample, + draw_gt: bool = True, + kpt_thr: float = 0.3, + num_instances=-1, + axis_azimuth: float = 70, + axis_limit: float = 1.7, + axis_dist: float = 10.0, + axis_elev: float = 15.0, + ): + """Draw keypoints 
and skeletons (optional) of GT or prediction. + + Args: + image (np.ndarray): The image to draw. + instances (:obj:`InstanceData`): Data structure for + instance-level annotations or predictions. + draw_gt (bool): Whether to draw GT PoseDataSample. Default to + ``True`` + kpt_thr (float, optional): Minimum threshold of keypoints + to be shown. Default: 0.3. + num_instances (int): Number of instances to be shown in 3D. If + smaller than 0, all the instances in the pose_result will be + shown. Otherwise, pad or truncate the pose_result to a length + of num_instances. + axis_azimuth (float): axis azimuth angle for 3D visualizations. + axis_dist (float): axis distance for 3D visualizations. + axis_elev (float): axis elevation view angle for 3D visualizations. + axis_limit (float): The axis limit to visualize 3d pose. The xyz + range will be set as: + - x: [x_c - axis_limit/2, x_c + axis_limit/2] + - y: [y_c - axis_limit/2, y_c + axis_limit/2] + - z: [0, axis_limit] + Where x_c, y_c is the mean value of x and y coordinates + + Returns: + Tuple(np.ndarray): the drawn image which channel is RGB. 
+ """ + vis_height, vis_width, _ = image.shape + + if 'pred_instances' in pose_samples: + pred_instances = pose_samples.pred_instances + else: + pred_instances = InstanceData() + if num_instances < 0: + if 'keypoints' in pred_instances: + num_instances = len(pred_instances) + else: + num_instances = 0 + else: + if len(pred_instances) > num_instances: + for k in pred_instances.keys(): + new_val = pred_instances.k[:num_instances] + pose_samples.pred_instances.k = new_val + elif num_instances < len(pred_instances): + num_instances = len(pred_instances) + + num_fig = num_instances + if draw_gt: + vis_width *= 2 + num_fig *= 2 + + plt.ioff() + fig = plt.figure( + figsize=(vis_width * num_instances * 0.01, vis_height * 0.01)) + + def _draw_3d_instances_kpts(keypoints, + scores, + keypoints_visible, + fig_idx, + title=None): + + for idx, (kpts, score, visible) in enumerate( + zip(keypoints, scores, keypoints_visible)): + + valid = np.logical_and(score >= kpt_thr, + np.any(~np.isnan(kpts), axis=-1)) + + ax = fig.add_subplot( + 1, num_fig, fig_idx * (idx + 1), projection='3d') + ax.view_init(elev=axis_elev, azim=axis_azimuth) + ax.set_zlim3d([0, axis_limit]) + ax.set_aspect('auto') + ax.set_xticks([]) + ax.set_yticks([]) + ax.set_zticks([]) + ax.set_xticklabels([]) + ax.set_yticklabels([]) + ax.set_zticklabels([]) + ax.scatter([0], [0], [0], marker='o', color='red') + if title: + ax.set_title(f'{title} ({idx})') + ax.dist = axis_dist + + x_c = np.mean(kpts[valid, 0]) if valid.any() else 0 + y_c = np.mean(kpts[valid, 1]) if valid.any() else 0 + + ax.set_xlim3d([x_c - axis_limit / 2, x_c + axis_limit / 2]) + ax.set_ylim3d([y_c - axis_limit / 2, y_c + axis_limit / 2]) + + kpts = np.array(kpts, copy=False) + + if self.kpt_color is None or isinstance(self.kpt_color, str): + kpt_color = [self.kpt_color] * len(kpts) + elif len(self.kpt_color) == len(kpts): + kpt_color = self.kpt_color + else: + raise ValueError( + f'the length of kpt_color ' + f'({len(self.kpt_color)}) does not 
matches ' + f'that of keypoints ({len(kpts)})') + + kpts = kpts[valid] + x_3d, y_3d, z_3d = np.split(kpts[:, :3], [1, 2], axis=1) + + kpt_color = kpt_color[valid][..., ::-1] / 255. + + ax.scatter(x_3d, y_3d, z_3d, marker='o', color=kpt_color) + + for kpt_idx in range(len(x_3d)): + ax.text(x_3d[kpt_idx][0], y_3d[kpt_idx][0], + z_3d[kpt_idx][0], str(kpt_idx)) + + if self.skeleton is not None and self.link_color is not None: + if self.link_color is None or isinstance( + self.link_color, str): + link_color = [self.link_color] * len(self.skeleton) + elif len(self.link_color) == len(self.skeleton): + link_color = self.link_color + else: + raise ValueError( + f'the length of link_color ' + f'({len(self.link_color)}) does not matches ' + f'that of skeleton ({len(self.skeleton)})') + + for sk_id, sk in enumerate(self.skeleton): + sk_indices = [_i for _i in sk] + xs_3d = kpts[sk_indices, 0] + ys_3d = kpts[sk_indices, 1] + zs_3d = kpts[sk_indices, 2] + kpt_score = score[sk_indices] + if kpt_score.min() > kpt_thr: + # matplotlib uses RGB color in [0, 1] value range + _color = link_color[sk_id][::-1] / 255. 
+ ax.plot( + xs_3d, ys_3d, zs_3d, color=_color, zdir='z') + + if 'keypoints' in pred_instances: + keypoints = pred_instances.get('keypoints', + pred_instances.keypoints) + + if 'keypoint_scores' in pred_instances: + scores = pred_instances.keypoint_scores + else: + scores = np.ones(keypoints.shape[:-1]) + + if 'keypoints_visible' in pred_instances: + keypoints_visible = pred_instances.keypoints_visible + else: + keypoints_visible = np.ones(keypoints.shape[:-1]) + + _draw_3d_instances_kpts(keypoints, scores, keypoints_visible, 1, + 'Prediction') + + if draw_gt and 'gt_instances' in pose_samples: + gt_instances = pose_samples.gt_instances + if 'lifting_target' in gt_instances: + keypoints = gt_instances.get('lifting_target', + gt_instances.lifting_target) + scores = np.ones(keypoints.shape[:-1]) + + if 'lifting_target_visible' in gt_instances: + keypoints_visible = gt_instances.lifting_target_visible + else: + keypoints_visible = np.ones(keypoints.shape[:-1]) + + _draw_3d_instances_kpts(keypoints, scores, keypoints_visible, + 2, 'Ground Truth') + + # convert figure to numpy array + fig.tight_layout() + fig.canvas.draw() + + pred_img_data = fig.canvas.tostring_rgb() + pred_img_data = np.frombuffer( + fig.canvas.tostring_rgb(), dtype=np.uint8) + + if not pred_img_data.any(): + pred_img_data = np.full((vis_height, vis_width, 3), 255) + else: + pred_img_data = pred_img_data.reshape(vis_height, + vis_width * num_instances, + -1) + + plt.close(fig) + + return pred_img_data + + def _draw_instances_kpts( + self, + image: np.ndarray, + instances: InstanceData, + kpt_thr: float = 0.3, + show_kpt_idx: bool = False, + skeleton_style: str = 'mmpose', + det_kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, + det_dataset_skeleton: Optional[List] = None, + det_dataset_link_color: Optional[np.ndarray] = None): + """Draw keypoints and skeletons (optional) of GT or prediction. + + Args: + image (np.ndarray): The image to draw. 
+ instances (:obj:`InstanceData`): Data structure for + instance-level annotations or predictions. + kpt_thr (float, optional): Minimum threshold of keypoints + to be shown. Default: 0.3. + show_kpt_idx (bool): Whether to show the index of keypoints. + Defaults to ``False`` + skeleton_style (str): Skeleton style selection. Defaults to + ``'mmpose'`` + det_kpt_color (str, tuple(tuple(int)), optional): Keypoints + color info for detection. Defaults to ``None`` + det_dataset_skeleton (list): Skeleton info for detection. Defaults + to ``None`` + det_dataset_link_color (list): Link color for detection. Defaults + to ``None`` + + Returns: + np.ndarray: the drawn image which channel is RGB. + """ + + self.set_image(image) + img_h, img_w, _ = image.shape + + if 'keypoints' in instances: + keypoints = instances.get('transformed_keypoints', + instances.keypoints) + + if 'keypoint_scores' in instances: + scores = instances.keypoint_scores + else: + scores = np.ones(keypoints.shape[:-1]) + + if 'keypoints_visible' in instances: + keypoints_visible = instances.keypoints_visible + else: + keypoints_visible = np.ones(keypoints.shape[:-1]) + + if skeleton_style == 'openpose': + keypoints_info = np.concatenate( + (keypoints, scores[..., None], keypoints_visible[..., + None]), + axis=-1) + # compute neck joint + neck = np.mean(keypoints_info[:, [5, 6]], axis=1) + # neck score when visualizing pred + neck[:, 2:4] = np.logical_and( + keypoints_info[:, 5, 2:4] > kpt_thr, + keypoints_info[:, 6, 2:4] > kpt_thr).astype(int) + new_keypoints_info = np.insert( + keypoints_info, 17, neck, axis=1) + + mmpose_idx = [ + 17, 6, 8, 10, 7, 9, 12, 14, 16, 13, 15, 2, 1, 4, 3 + ] + openpose_idx = [ + 1, 2, 3, 4, 6, 7, 8, 9, 10, 12, 13, 14, 15, 16, 17 + ] + new_keypoints_info[:, openpose_idx] = \ + new_keypoints_info[:, mmpose_idx] + keypoints_info = new_keypoints_info + + keypoints, scores, keypoints_visible = keypoints_info[ + ..., :2], keypoints_info[..., 2], keypoints_info[..., 3] + + kpt_color = 
self.kpt_color + if det_kpt_color is not None: + kpt_color = det_kpt_color + + for kpts, score, visible in zip(keypoints, scores, + keypoints_visible): + kpts = np.array(kpts, copy=False) + + if kpt_color is None or isinstance(kpt_color, str): + kpt_color = [kpt_color] * len(kpts) + elif len(kpt_color) == len(kpts): + kpt_color = kpt_color + else: + raise ValueError(f'the length of kpt_color ' + f'({len(kpt_color)}) does not matches ' + f'that of keypoints ({len(kpts)})') + + # draw each point on image + for kid, kpt in enumerate(kpts): + if score[kid] < kpt_thr or not visible[ + kid] or kpt_color[kid] is None: + # skip the point that should not be drawn + continue + + color = kpt_color[kid] + if not isinstance(color, str): + color = tuple(int(c) for c in color) + transparency = self.alpha + if self.show_keypoint_weight: + transparency *= max(0, min(1, score[kid])) + self.draw_circles( + kpt, + radius=np.array([self.radius]), + face_colors=color, + edge_colors=color, + alpha=transparency, + line_widths=self.radius) + if show_kpt_idx: + self.draw_texts( + str(kid), + kpt, + colors=color, + font_sizes=self.radius * 3, + vertical_alignments='bottom', + horizontal_alignments='center') + + # draw links + skeleton = self.skeleton + if det_dataset_skeleton is not None: + skeleton = det_dataset_skeleton + link_color = self.link_color + if det_dataset_link_color is not None: + link_color = det_dataset_link_color + if skeleton is not None and link_color is not None: + if link_color is None or isinstance(link_color, str): + link_color = [link_color] * len(skeleton) + elif len(link_color) == len(skeleton): + link_color = link_color + else: + raise ValueError( + f'the length of link_color ' + f'({len(link_color)}) does not matches ' + f'that of skeleton ({len(skeleton)})') + + for sk_id, sk in enumerate(skeleton): + pos1 = (int(kpts[sk[0], 0]), int(kpts[sk[0], 1])) + pos2 = (int(kpts[sk[1], 0]), int(kpts[sk[1], 1])) + if not (visible[sk[0]] and visible[sk[1]]): + continue + + 
if (pos1[0] <= 0 or pos1[0] >= img_w or pos1[1] <= 0 + or pos1[1] >= img_h or pos2[0] <= 0 + or pos2[0] >= img_w or pos2[1] <= 0 + or pos2[1] >= img_h or score[sk[0]] < kpt_thr + or score[sk[1]] < kpt_thr + or link_color[sk_id] is None): + # skip the link that should not be drawn + continue + X = np.array((pos1[0], pos2[0])) + Y = np.array((pos1[1], pos2[1])) + color = link_color[sk_id] + if not isinstance(color, str): + color = tuple(int(c) for c in color) + transparency = self.alpha + if self.show_keypoint_weight: + transparency *= max( + 0, min(1, 0.5 * (score[sk[0]] + score[sk[1]]))) + + if skeleton_style == 'openpose': + mX = np.mean(X) + mY = np.mean(Y) + length = ((Y[0] - Y[1])**2 + (X[0] - X[1])**2)**0.5 + angle = math.degrees( + math.atan2(Y[0] - Y[1], X[0] - X[1])) + stickwidth = 2 + polygons = cv2.ellipse2Poly( + (int(mX), int(mY)), + (int(length / 2), int(stickwidth)), int(angle), + 0, 360, 1) + + self.draw_polygons( + polygons, + edge_colors=color, + face_colors=color, + alpha=transparency) + + else: + self.draw_lines( + X, Y, color, line_widths=self.line_width) + + return self.get_image() + + @master_only + def add_datasample( + self, + name: str, + image: np.ndarray, + data_sample: PoseDataSample, + det_data_sample: Optional[PoseDataSample] = None, + draw_gt: bool = True, + draw_pred: bool = True, + draw_2d: bool = True, + det_kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, + det_dataset_skeleton: Optional[Union[str, + Tuple[Tuple[int]]]] = None, + det_dataset_link_color: Optional[np.ndarray] = None, + draw_bbox: bool = False, + show_kpt_idx: bool = False, + skeleton_style: str = 'mmpose', + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + kpt_thr: float = 0.3, + step: int = 0) -> None: + """Draw datasample and save to all backends. + + - If GT and prediction are plotted at the same time, they are + displayed in a stitched image where the left image is the + ground truth and the right image is the prediction. 
+ - If ``show`` is True, all storage backends are ignored, and + the images will be displayed in a local window. + - If ``out_file`` is specified, the drawn image will be + saved to ``out_file``. It is usually used when the display + is not available. + + Args: + name (str): The image identifier + image (np.ndarray): The image to draw + data_sample (:obj:`PoseDataSample`): The 3d data sample + to visualize + det_data_sample (:obj:`PoseDataSample`, optional): The 2d detection + data sample to visualize + draw_gt (bool): Whether to draw GT PoseDataSample. Default to + ``True`` + draw_pred (bool): Whether to draw Prediction PoseDataSample. + Defaults to ``True`` + draw_2d (bool): Whether to draw 2d detection results. Defaults to + ``True`` + det_kpt_color (str, tuple(tuple(int)), optional): Keypoints color + info for detection. Defaults to ``None`` + det_dataset_skeleton (np.ndarray, optional): The skeleton link info + for detection data. Default to ``None`` + det_dataset_link_color (str, tuple(tuple(int)), optional): Link + color for detection. Defaults to ``None`` + draw_bbox (bool): Whether to draw bounding boxes. Default to + ``False`` + show_kpt_idx (bool): Whether to show the index of keypoints. + Defaults to ``False`` + skeleton_style (str): Skeleton style selection. Defaults to + ``'mmpose'`` + show (bool): Whether to display the drawn image. Default to + ``False`` + wait_time (float): The interval of show (s). Defaults to 0 + out_file (str): Path to output file. Defaults to ``None`` + kpt_thr (float, optional): Minimum threshold of keypoints + to be shown. Default: 0.3. + step (int): Global step value to record.
Defaults to 0 + """ + + det_img_data = None + gt_img_data = None + + if draw_2d: + det_img_data = image.copy() + + # draw bboxes & keypoints + if 'pred_instances' in det_data_sample: + det_img_data = self._draw_instances_kpts( + det_img_data, det_data_sample.pred_instances, kpt_thr, + show_kpt_idx, skeleton_style, det_kpt_color, + det_dataset_skeleton, det_dataset_link_color) + if draw_bbox: + det_img_data = self._draw_instances_bbox( + det_img_data, det_data_sample.pred_instances) + + pred_img_data = self._draw_3d_data_samples( + image.copy(), data_sample, draw_gt=draw_gt) + + # merge visualization results + if det_img_data is not None and gt_img_data is not None: + drawn_img = np.concatenate( + (det_img_data, pred_img_data, gt_img_data), axis=1) + elif det_img_data is not None: + drawn_img = np.concatenate((det_img_data, pred_img_data), axis=1) + elif gt_img_data is not None: + drawn_img = np.concatenate((det_img_data, gt_img_data), axis=1) + else: + drawn_img = pred_img_data + + # It is convenient for users to obtain the drawn image. + # For example, the user wants to obtain the drawn image and + # save it as a video during video inference. + self.set_image(drawn_img) + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + # save drawn_img to backends + self.add_image(name, drawn_img, step) + + return self.get_image() diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index 4d6f4e6d94..b070f24d1e 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -593,6 +593,41 @@ set-ExecutionPolicy RemoteSigned example\cpp\build\Release ``` +### MMPose demo scripts + +MMPose provides demo scripts to conduct [inference with existing models](https://mmpose.readthedocs.io/en/latest/user_guides/inference.html). + +**Note:** + +- Inferencing with Pytorch can not reach the maximum speed of RTMPose, just for verification. 
+ +```shell +# go to the mmpose folder +cd ${PATH_TO_MMPOSE} + +# inference with rtmdet +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + {PATH_TO_CHECKPOINT}/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + {PATH_TO_CHECKPOINT}/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input {YOUR_TEST_IMG_or_VIDEO} \ + --show + +# inference with webcam +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + {PATH_TO_CHECKPOINT}/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + {PATH_TO_CHECKPOINT}/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input webcam \ + --show +``` + +Result is as follows: + +![topdown_inference_with_rtmdet](https://user-images.githubusercontent.com/13503330/220005020-06bdf37f-6817-4681-a2c8-9dd55e4fbf1e.png) + ## 👨‍🏫 How to Train [🔝](#-table-of-contents) Please refer to [Train and Test](https://mmpose.readthedocs.io/en/latest/user_guides/train_and_test.html). 
diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 7abafc25c4..01f5240fed 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -584,6 +584,41 @@ set-ExecutionPolicy RemoteSigned example\cpp\build\Release ``` +### MMPose demo 脚本 + +通过 MMPose 提供的 demo 脚本可以基于 Pytorch 快速进行[模型推理](https://mmpose.readthedocs.io/en/latest/user_guides/inference.html)和效果验证。 + +**提示:** + +- 基于 Pytorch 推理并不能达到 RTMPose 模型的真实推理速度,只用于模型效果验证。 + +```shell +# 前往 mmpose 目录 +cd ${PATH_TO_MMPOSE} + +# RTMDet 与 RTMPose 联合推理 +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + {PATH_TO_CHECKPOINT}/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + {PATH_TO_CHECKPOINT}/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input {YOUR_TEST_IMG_or_VIDEO} \ + --show + +# 摄像头推理 +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + {PATH_TO_CHECKPOINT}/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + {PATH_TO_CHECKPOINT}/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input webcam \ + --show +``` + +效果展示: + +![topdown_inference_with_rtmdet](https://user-images.githubusercontent.com/13503330/220005020-06bdf37f-6817-4681-a2c8-9dd55e4fbf1e.png) + ## 👨‍🏫 模型训练 [🔝](#-table-of-contents) 请参考 [训练与测试](https://mmpose.readthedocs.io/en/latest/user_guides/train_and_test.html) 进行 RTMPose 的训练。 diff --git a/tests/data/h36m/S1_Directions_1.54138969_000001.jpg b/tests/data/h36m/S1/S1_Directions_1.54138969/S1_Directions_1.54138969_000001.jpg similarity index 100% rename from tests/data/h36m/S1_Directions_1.54138969_000001.jpg rename to 
tests/data/h36m/S1/S1_Directions_1.54138969/S1_Directions_1.54138969_000001.jpg diff --git a/tests/data/h36m/S5_SittingDown.54138969_002061.jpg b/tests/data/h36m/S5/S5_SittingDown.54138969/S5_SittingDown.54138969_002061.jpg similarity index 100% rename from tests/data/h36m/S5_SittingDown.54138969_002061.jpg rename to tests/data/h36m/S5/S5_SittingDown.54138969/S5_SittingDown.54138969_002061.jpg diff --git a/tests/data/h36m/S7_Greeting.55011271_000396.jpg b/tests/data/h36m/S7/S7_Greeting.55011271/S7_Greeting.55011271_000396.jpg similarity index 100% rename from tests/data/h36m/S7_Greeting.55011271_000396.jpg rename to tests/data/h36m/S7/S7_Greeting.55011271/S7_Greeting.55011271_000396.jpg diff --git a/tests/data/h36m/S8_WalkDog_1.55011271_000026.jpg b/tests/data/h36m/S8/S8_WalkDog_1.55011271/S8_WalkDog_1.55011271_000026.jpg similarity index 100% rename from tests/data/h36m/S8_WalkDog_1.55011271_000026.jpg rename to tests/data/h36m/S8/S8_WalkDog_1.55011271/S8_WalkDog_1.55011271_000026.jpg diff --git a/tests/test_codecs/test_image_pose_lifting.py b/tests/test_codecs/test_image_pose_lifting.py new file mode 100644 index 0000000000..bb94786c32 --- /dev/null +++ b/tests/test_codecs/test_image_pose_lifting.py @@ -0,0 +1,150 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from unittest import TestCase + +import numpy as np + +from mmpose.codecs import ImagePoseLifting +from mmpose.registry import KEYPOINT_CODECS + + +class TestImagePoseLifting(TestCase): + + def setUp(self) -> None: + keypoints = (0.1 + 0.8 * np.random.rand(1, 17, 2)) * [192, 256] + keypoints = np.round(keypoints).astype(np.float32) + keypoints_visible = np.random.randint(2, size=(1, 17)) + lifting_target = (0.1 + 0.8 * np.random.rand(17, 3)) + lifting_target_visible = np.random.randint(2, size=(17, )) + encoded_wo_sigma = np.random.rand(1, 17, 3) + + self.keypoints_mean = np.random.rand(17, 2).astype(np.float32) + self.keypoints_std = np.random.rand(17, 2).astype(np.float32) + 1e-6 + self.target_mean = np.random.rand(17, 3).astype(np.float32) + self.target_std = np.random.rand(17, 3).astype(np.float32) + 1e-6 + + self.data = dict( + keypoints=keypoints, + keypoints_visible=keypoints_visible, + lifting_target=lifting_target, + lifting_target_visible=lifting_target_visible, + encoded_wo_sigma=encoded_wo_sigma) + + def build_pose_lifting_label(self, **kwargs): + cfg = dict(type='ImagePoseLifting', num_keypoints=17, root_index=0) + cfg.update(kwargs) + return KEYPOINT_CODECS.build(cfg) + + def test_build(self): + codec = self.build_pose_lifting_label() + self.assertIsInstance(codec, ImagePoseLifting) + + def test_encode(self): + keypoints = self.data['keypoints'] + keypoints_visible = self.data['keypoints_visible'] + lifting_target = self.data['lifting_target'] + lifting_target_visible = self.data['lifting_target_visible'] + + # test default settings + codec = self.build_pose_lifting_label() + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible) + + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (17, 3)) + self.assertEqual(encoded['lifting_target_weights'].shape, (17, )) + self.assertEqual(encoded['trajectory_weights'].shape, (17, )) + 
self.assertEqual(encoded['target_root'].shape, (3, )) + + # test removing root + codec = self.build_pose_lifting_label( + remove_root=True, save_index=True) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible) + + self.assertTrue('target_root_removed' in encoded + and 'target_root_index' in encoded) + self.assertEqual(encoded['lifting_target_weights'].shape, (16, )) + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (16, 3)) + self.assertEqual(encoded['target_root'].shape, (3, )) + + # test normalization + codec = self.build_pose_lifting_label( + keypoints_mean=self.keypoints_mean, + keypoints_std=self.keypoints_std, + target_mean=self.target_mean, + target_std=self.target_std) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible) + + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (17, 3)) + + def test_decode(self): + lifting_target = self.data['lifting_target'] + encoded_wo_sigma = self.data['encoded_wo_sigma'] + + codec = self.build_pose_lifting_label() + + decoded, scores = codec.decode( + encoded_wo_sigma, target_root=lifting_target[..., 0, :]) + + self.assertEqual(decoded.shape, (1, 17, 3)) + self.assertEqual(scores.shape, (1, 17)) + + codec = self.build_pose_lifting_label(remove_root=True) + + decoded, scores = codec.decode( + encoded_wo_sigma, target_root=lifting_target[..., 0, :]) + + self.assertEqual(decoded.shape, (1, 18, 3)) + self.assertEqual(scores.shape, (1, 18)) + + def test_cicular_verification(self): + keypoints = self.data['keypoints'] + keypoints_visible = self.data['keypoints_visible'] + lifting_target = self.data['lifting_target'] + lifting_target_visible = self.data['lifting_target_visible'] + + # test default settings + codec = self.build_pose_lifting_label() + encoded = codec.encode(keypoints, keypoints_visible, 
lifting_target, + lifting_target_visible) + + _keypoints, _ = codec.decode( + np.expand_dims(encoded['lifting_target_label'], axis=0), + target_root=lifting_target[..., 0, :]) + + self.assertTrue( + np.allclose( + np.expand_dims(lifting_target, axis=0), _keypoints, atol=5.)) + + # test removing root + codec = self.build_pose_lifting_label(remove_root=True) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible) + + _keypoints, _ = codec.decode( + np.expand_dims(encoded['lifting_target_label'], axis=0), + target_root=lifting_target[..., 0, :]) + + self.assertTrue( + np.allclose( + np.expand_dims(lifting_target, axis=0), _keypoints, atol=5.)) + + # test normalization + codec = self.build_pose_lifting_label( + keypoints_mean=self.keypoints_mean, + keypoints_std=self.keypoints_std, + target_mean=self.target_mean, + target_std=self.target_std) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible) + + _keypoints, _ = codec.decode( + np.expand_dims(encoded['lifting_target_label'], axis=0), + target_root=lifting_target[..., 0, :]) + + self.assertTrue( + np.allclose( + np.expand_dims(lifting_target, axis=0), _keypoints, atol=5.)) diff --git a/tests/test_codecs/test_video_pose_lifting.py b/tests/test_codecs/test_video_pose_lifting.py new file mode 100644 index 0000000000..cc58292d0c --- /dev/null +++ b/tests/test_codecs/test_video_pose_lifting.py @@ -0,0 +1,156 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import os.path as osp +from unittest import TestCase + +import numpy as np +from mmengine.fileio import load + +from mmpose.codecs import VideoPoseLifting +from mmpose.registry import KEYPOINT_CODECS + + +class TestVideoPoseLifting(TestCase): + + def get_camera_param(self, imgname, camera_param) -> dict: + """Get camera parameters of a frame by its image name.""" + subj, rest = osp.basename(imgname).split('_', 1) + action, rest = rest.split('.', 1) + camera, rest = rest.split('_', 1) + return camera_param[(subj, camera)] + + def build_pose_lifting_label(self, **kwargs): + cfg = dict(type='VideoPoseLifting', num_keypoints=17) + cfg.update(kwargs) + return KEYPOINT_CODECS.build(cfg) + + def setUp(self) -> None: + keypoints = (0.1 + 0.8 * np.random.rand(1, 17, 2)) * [192, 256] + keypoints = np.round(keypoints).astype(np.float32) + keypoints_visible = np.random.randint(2, size=(1, 17)) + lifting_target = (0.1 + 0.8 * np.random.rand(17, 3)) + lifting_target_visible = np.random.randint(2, size=(17, )) + encoded_wo_sigma = np.random.rand(1, 17, 3) + + camera_param = load('tests/data/h36m/cameras.pkl') + camera_param = self.get_camera_param( + 'S1/S1_Directions_1.54138969/S1_Directions_1.54138969_000001.jpg', + camera_param) + + self.data = dict( + keypoints=keypoints, + keypoints_visible=keypoints_visible, + lifting_target=lifting_target, + lifting_target_visible=lifting_target_visible, + camera_param=camera_param, + encoded_wo_sigma=encoded_wo_sigma) + + def test_build(self): + codec = self.build_pose_lifting_label() + self.assertIsInstance(codec, VideoPoseLifting) + + def test_encode(self): + keypoints = self.data['keypoints'] + keypoints_visible = self.data['keypoints_visible'] + lifting_target = self.data['lifting_target'] + lifting_target_visible = self.data['lifting_target_visible'] + camera_param = self.data['camera_param'] + + # test default settings + codec = self.build_pose_lifting_label() + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + 
lifting_target_visible, camera_param) + + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (17, 3)) + self.assertEqual(encoded['lifting_target_weights'].shape, (17, )) + self.assertEqual(encoded['trajectory_weights'].shape, (17, )) + self.assertEqual(encoded['target_root'].shape, (3, )) + + # test not zero-centering + codec = self.build_pose_lifting_label(zero_center=False) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible, camera_param) + + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (17, 3)) + self.assertEqual(encoded['lifting_target_weights'].shape, (17, )) + self.assertEqual(encoded['trajectory_weights'].shape, (17, )) + + # test removing root + codec = self.build_pose_lifting_label( + remove_root=True, save_index=True) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible, camera_param) + + self.assertTrue('target_root_removed' in encoded + and 'target_root_index' in encoded) + self.assertEqual(encoded['lifting_target_weights'].shape, (16, )) + self.assertEqual(encoded['keypoint_labels'].shape, (1, 17, 2)) + self.assertEqual(encoded['lifting_target_label'].shape, (16, 3)) + self.assertEqual(encoded['target_root'].shape, (3, )) + + # test normalizing camera + codec = self.build_pose_lifting_label(normalize_camera=True) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible, camera_param) + + self.assertTrue('camera_param' in encoded) + scale = np.array(0.5 * camera_param['w'], dtype=np.float32) + self.assertTrue( + np.allclose( + camera_param['f'] / scale, + encoded['camera_param']['f'], + atol=4.)) + + def test_decode(self): + lifting_target = self.data['lifting_target'] + encoded_wo_sigma = self.data['encoded_wo_sigma'] + + codec = self.build_pose_lifting_label() + + decoded, scores = 
codec.decode( + encoded_wo_sigma, target_root=lifting_target[..., 0, :]) + + self.assertEqual(decoded.shape, (1, 17, 3)) + self.assertEqual(scores.shape, (1, 17)) + + codec = self.build_pose_lifting_label(remove_root=True) + + decoded, scores = codec.decode( + encoded_wo_sigma, target_root=lifting_target[..., 0, :]) + + self.assertEqual(decoded.shape, (1, 18, 3)) + self.assertEqual(scores.shape, (1, 18)) + + def test_cicular_verification(self): + keypoints = self.data['keypoints'] + keypoints_visible = self.data['keypoints_visible'] + lifting_target = self.data['lifting_target'] + lifting_target_visible = self.data['lifting_target_visible'] + camera_param = self.data['camera_param'] + + # test default settings + codec = self.build_pose_lifting_label() + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible, camera_param) + + _keypoints, _ = codec.decode( + np.expand_dims(encoded['lifting_target_label'], axis=0), + target_root=lifting_target[..., 0, :]) + + self.assertTrue( + np.allclose( + np.expand_dims(lifting_target, axis=0), _keypoints, atol=5.)) + + # test removing root + codec = self.build_pose_lifting_label(remove_root=True) + encoded = codec.encode(keypoints, keypoints_visible, lifting_target, + lifting_target_visible, camera_param) + + _keypoints, _ = codec.decode( + np.expand_dims(encoded['lifting_target_label'], axis=0), + target_root=lifting_target[..., 0, :]) + + self.assertTrue( + np.allclose( + np.expand_dims(lifting_target, axis=0), _keypoints, atol=5.)) diff --git a/tests/test_datasets/test_datasets/test_body_datasets/test_h36m_dataset.py b/tests/test_datasets/test_datasets/test_body_datasets/test_h36m_dataset.py new file mode 100644 index 0000000000..88944dc11f --- /dev/null +++ b/tests/test_datasets/test_datasets/test_body_datasets/test_h36m_dataset.py @@ -0,0 +1,175 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from unittest import TestCase + +import numpy as np + +from mmpose.datasets.datasets.body3d import Human36mDataset + + +class TestH36MDataset(TestCase): + + def build_h36m_dataset(self, **kwargs): + + cfg = dict( + ann_file='test_h36m_body3d.npz', + data_mode='topdown', + data_root='tests/data/h36m', + pipeline=[], + test_mode=False) + + cfg.update(kwargs) + return Human36mDataset(**cfg) + + def check_data_info_keys(self, + data_info: dict, + data_mode: str = 'topdown'): + if data_mode == 'topdown': + expected_keys = dict( + img_ids=list, + img_paths=list, + keypoints=np.ndarray, + keypoints_3d=np.ndarray, + scale=np.float32, + center=np.ndarray, + id=int) + elif data_mode == 'bottomup': + expected_keys = dict( + img_ids=list, + img_paths=list, + keypoints=np.ndarray, + keypoints_3d=np.ndarray, + scale=list, + center=np.ndarray, + invalid_segs=list, + id=list) + else: + raise ValueError(f'Invalid data_mode {data_mode}') + + for key, type_ in expected_keys.items(): + self.assertIn(key, data_info) + self.assertIsInstance(data_info[key], type_, key) + + def check_metainfo_keys(self, metainfo: dict): + expected_keys = dict( + dataset_name=str, + num_keypoints=int, + keypoint_id2name=dict, + keypoint_name2id=dict, + upper_body_ids=list, + lower_body_ids=list, + flip_indices=list, + flip_pairs=list, + keypoint_colors=np.ndarray, + num_skeleton_links=int, + skeleton_links=list, + skeleton_link_colors=np.ndarray, + dataset_keypoint_weights=np.ndarray) + + for key, type_ in expected_keys.items(): + self.assertIn(key, metainfo) + self.assertIsInstance(metainfo[key], type_, key) + + def test_metainfo(self): + dataset = self.build_h36m_dataset() + self.check_metainfo_keys(dataset.metainfo) + # test dataset_name + self.assertEqual(dataset.metainfo['dataset_name'], 'h36m') + + # test number of keypoints + num_keypoints = 17 + self.assertEqual(dataset.metainfo['num_keypoints'], num_keypoints) + self.assertEqual( + len(dataset.metainfo['keypoint_colors']), num_keypoints) + 
self.assertEqual( + len(dataset.metainfo['dataset_keypoint_weights']), num_keypoints) + + # test some extra metainfo + self.assertEqual( + len(dataset.metainfo['skeleton_links']), + len(dataset.metainfo['skeleton_link_colors'])) + + def test_topdown(self): + # test topdown training + dataset = self.build_h36m_dataset(data_mode='topdown') + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0]) + + # test topdown testing + dataset = self.build_h36m_dataset(data_mode='topdown', test_mode=True) + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0]) + + # test topdown training with camera file + dataset = self.build_h36m_dataset( + data_mode='topdown', camera_param_file='cameras.pkl') + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0]) + + # test topdown training with sequence config + dataset = self.build_h36m_dataset( + data_mode='topdown', + seq_len=27, + seq_step=1, + causal=False, + pad_video_seq=True, + camera_param_file='cameras.pkl') + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0]) + + # test topdown testing with 2d keypoint detection file and + # sequence config + dataset = self.build_h36m_dataset( + data_mode='topdown', + seq_len=27, + seq_step=1, + causal=False, + pad_video_seq=True, + test_mode=True, + keypoint_2d_src='detection', + keypoint_2d_det_file='test_h36m_2d_detection.npy') + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0]) + + def test_bottomup(self): + # test bottomup training + dataset = self.build_h36m_dataset(data_mode='bottomup') + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0], data_mode='bottomup') + + # test bottomup training + dataset = self.build_h36m_dataset( + data_mode='bottomup', + seq_len=27, + seq_step=1, + causal=False, + pad_video_seq=True) + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0], data_mode='bottomup') + + # test bottomup testing + dataset = 
self.build_h36m_dataset(data_mode='bottomup', test_mode=True) + self.assertEqual(len(dataset), 4) + self.check_data_info_keys(dataset[0], data_mode='bottomup') + + def test_exceptions_and_warnings(self): + + with self.assertRaisesRegex(ValueError, 'got invalid data_mode'): + _ = self.build_h36m_dataset(data_mode='invalid') + + SUPPORTED_keypoint_2d_src = {'gt', 'detection', 'pipeline'} + with self.assertRaisesRegex( + ValueError, 'Unsupported `keypoint_2d_src` "invalid". ' + f'Supported options are {SUPPORTED_keypoint_2d_src}'): + _ = self.build_h36m_dataset( + data_mode='topdown', + test_mode=False, + keypoint_2d_src='invalid') + + with self.assertRaisesRegex(AssertionError, + 'Annotation file does not exist'): + _ = self.build_h36m_dataset( + data_mode='topdown', test_mode=False, ann_file='invalid') + + with self.assertRaisesRegex(AssertionError, + 'Unsupported `subset_frac` 2.'): + _ = self.build_h36m_dataset(data_mode='topdown', subset_frac=2) diff --git a/tests/test_datasets/test_transforms/test_pose3d_transforms.py b/tests/test_datasets/test_transforms/test_pose3d_transforms.py new file mode 100644 index 0000000000..5f5d5aa096 --- /dev/null +++ b/tests/test_datasets/test_transforms/test_pose3d_transforms.py @@ -0,0 +1,150 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from copy import deepcopy +from unittest import TestCase + +import numpy as np +from mmengine.fileio import load + +from mmpose.datasets.transforms import RandomFlipAroundRoot + + +def get_h36m_sample(): + + def _parse_h36m_imgname(imgname): + """Parse imgname to get information of subject, action and camera. 
+ + A typical h36m image filename is like: + S1_Directions_1.54138969_000001.jpg + """ + subj, rest = osp.basename(imgname).split('_', 1) + action, rest = rest.split('.', 1) + camera, rest = rest.split('_', 1) + return subj, action, camera + + ann_flle = 'tests/data/h36m/test_h36m_body3d.npz' + camera_param_file = 'tests/data/h36m/cameras.pkl' + + data = np.load(ann_flle) + cameras = load(camera_param_file) + + imgnames = data['imgname'] + keypoints = data['part'].astype(np.float32) + keypoints_3d = data['S'].astype(np.float32) + centers = data['center'].astype(np.float32) + scales = data['scale'].astype(np.float32) + + idx = 0 + target_idx = 0 + + data_info = { + 'keypoints': keypoints[idx, :, :2].reshape(1, -1, 2), + 'keypoints_visible': keypoints[idx, :, 2].reshape(1, -1), + 'keypoints_3d': keypoints_3d[idx, :, :3].reshape(1, -1, 3), + 'keypoints_3d_visible': keypoints_3d[idx, :, 3].reshape(1, -1), + 'scale': scales[idx], + 'center': centers[idx].astype(np.float32).reshape(1, -1), + 'id': idx, + 'img_ids': [idx], + 'img_paths': [imgnames[idx]], + 'category_id': 1, + 'iscrowd': 0, + 'sample_idx': idx, + 'lifting_target': keypoints_3d[target_idx, :, :3], + 'lifting_target_visible': keypoints_3d[target_idx, :, 3], + 'target_img_path': osp.join('tests/data/h36m', imgnames[target_idx]), + } + + # add camera parameters + subj, _, camera = _parse_h36m_imgname(imgnames[idx]) + data_info['camera_param'] = cameras[(subj, camera)] + + # add ann_info + ann_info = {} + ann_info['num_keypoints'] = 17 + ann_info['dataset_keypoint_weights'] = np.full(17, 1.0, dtype=np.float32) + ann_info['flip_pairs'] = [[1, 4], [2, 5], [3, 6], [11, 14], [12, 15], + [13, 16]] + ann_info['skeleton_links'] = [] + ann_info['upper_body_ids'] = (0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + ann_info['lower_body_ids'] = (1, 2, 3, 4, 5, 6) + ann_info['flip_indices'] = [ + 0, 4, 5, 6, 1, 2, 3, 7, 8, 9, 10, 14, 15, 16, 11, 12, 13 + ] + + data_info.update(ann_info) + + return data_info + + +class 
TestRandomFlipAroundRoot(TestCase): + + def setUp(self): + self.data_info = get_h36m_sample() + self.keypoints_flip_cfg = dict(center_mode='static', center_x=0.) + self.target_flip_cfg = dict(center_mode='root', center_index=0) + + def test_init(self): + _ = RandomFlipAroundRoot( + self.keypoints_flip_cfg, + self.target_flip_cfg, + flip_prob=0.5, + flip_camera=False) + + def test_transform(self): + kpts1 = self.data_info['keypoints'] + kpts_vis1 = self.data_info['keypoints_visible'] + tar1 = self.data_info['lifting_target'] + tar_vis1 = self.data_info['lifting_target_visible'] + + transform = RandomFlipAroundRoot( + self.keypoints_flip_cfg, self.target_flip_cfg, flip_prob=1) + results = deepcopy(self.data_info) + results = transform(results) + + kpts2 = results['keypoints'] + kpts_vis2 = results['keypoints_visible'] + tar2 = results['lifting_target'] + tar_vis2 = results['lifting_target_visible'] + + self.assertEqual(kpts_vis2.shape, (1, 17)) + self.assertEqual(tar_vis2.shape, (17, )) + self.assertEqual(kpts2.shape, (1, 17, 2)) + self.assertEqual(tar2.shape, (17, 3)) + + flip_indices = [ + 0, 4, 5, 6, 1, 2, 3, 7, 8, 9, 10, 14, 15, 16, 11, 12, 13 + ] + for left, right in enumerate(flip_indices): + self.assertTrue( + np.allclose(-kpts1[0][left][:1], kpts2[0][right][:1], atol=4.)) + self.assertTrue( + np.allclose(kpts1[0][left][1:], kpts2[0][right][1:], atol=4.)) + self.assertTrue( + np.allclose(tar1[left][1:], tar2[right][1:], atol=4.)) + + self.assertTrue( + np.allclose(kpts_vis1[0][left], kpts_vis2[0][right], atol=4.)) + self.assertTrue( + np.allclose(tar_vis1[left], tar_vis2[right], atol=4.)) + + # test camera flipping + transform = RandomFlipAroundRoot( + self.keypoints_flip_cfg, + self.target_flip_cfg, + flip_prob=1, + flip_camera=True) + results = deepcopy(self.data_info) + results = transform(results) + + camera2 = results['camera_param'] + self.assertTrue( + np.allclose( + -self.data_info['camera_param']['c'][0], + camera2['c'][0], + atol=4.)) + 
self.assertTrue( + np.allclose( + -self.data_info['camera_param']['p'][0], + camera2['p'][0], + atol=4.)) diff --git a/tests/test_evaluation/test_functional/test_keypoint_eval.py b/tests/test_evaluation/test_functional/test_keypoint_eval.py index 2234c8e547..47ede83921 100644 --- a/tests/test_evaluation/test_functional/test_keypoint_eval.py +++ b/tests/test_evaluation/test_functional/test_keypoint_eval.py @@ -1,163 +1,212 @@ # Copyright (c) OpenMMLab. All rights reserved. +from unittest import TestCase + import numpy as np from numpy.testing import assert_array_almost_equal from mmpose.evaluation.functional import (keypoint_auc, keypoint_epe, - keypoint_nme, keypoint_pck_accuracy, + keypoint_mpjpe, keypoint_nme, + keypoint_pck_accuracy, multilabel_classification_accuracy, pose_pck_accuracy) -def test_keypoint_pck_accuracy(): - output = np.zeros((2, 5, 2)) - target = np.zeros((2, 5, 2)) - mask = np.array([[True, True, False, True, True], - [True, True, False, True, True]]) - thr = np.full((2, 2), 10, dtype=np.float32) - # first channel - output[0, 0] = [10, 0] - target[0, 0] = [10, 0] - # second channel - output[0, 1] = [20, 20] - target[0, 1] = [10, 10] - # third channel - output[0, 2] = [0, 0] - target[0, 2] = [-1, 0] - # fourth channel - output[0, 3] = [30, 30] - target[0, 3] = [30, 30] - # fifth channel - output[0, 4] = [0, 10] - target[0, 4] = [0, 10] - - acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, thr) - - assert_array_almost_equal(acc, np.array([1, 0.5, -1, 1, 1]), decimal=4) - assert abs(avg_acc - 0.875) < 1e-4 - assert abs(cnt - 4) < 1e-4 - - acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, - np.zeros((2, 2))) - assert_array_almost_equal(acc, np.array([-1, -1, -1, -1, -1]), decimal=4) - assert abs(avg_acc) < 1e-4 - assert abs(cnt) < 1e-4 - - acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, - np.array([[0, 0], [10, 10]])) - assert_array_almost_equal(acc, np.array([1, 1, -1, 1, 1]), decimal=4) - 
assert abs(avg_acc - 1) < 1e-4 - assert abs(cnt - 4) < 1e-4 - - -def test_keypoint_auc(): - output = np.zeros((1, 5, 2)) - target = np.zeros((1, 5, 2)) - mask = np.array([[True, True, False, True, True]]) - # first channel - output[0, 0] = [10, 4] - target[0, 0] = [10, 0] - # second channel - output[0, 1] = [10, 18] - target[0, 1] = [10, 10] - # third channel - output[0, 2] = [0, 0] - target[0, 2] = [0, -1] - # fourth channel - output[0, 3] = [40, 40] - target[0, 3] = [30, 30] - # fifth channel - output[0, 4] = [20, 10] - target[0, 4] = [0, 10] - - auc = keypoint_auc(output, target, mask, 20, 4) - assert abs(auc - 0.375) < 1e-4 - - -def test_keypoint_epe(): - output = np.zeros((1, 5, 2)) - target = np.zeros((1, 5, 2)) - mask = np.array([[True, True, False, True, True]]) - # first channel - output[0, 0] = [10, 4] - target[0, 0] = [10, 0] - # second channel - output[0, 1] = [10, 18] - target[0, 1] = [10, 10] - # third channel - output[0, 2] = [0, 0] - target[0, 2] = [-1, -1] - # fourth channel - output[0, 3] = [40, 40] - target[0, 3] = [30, 30] - # fifth channel - output[0, 4] = [20, 10] - target[0, 4] = [0, 10] - - epe = keypoint_epe(output, target, mask) - assert abs(epe - 11.5355339) < 1e-4 - - -def test_keypoint_nme(): - output = np.zeros((1, 5, 2)) - target = np.zeros((1, 5, 2)) - mask = np.array([[True, True, False, True, True]]) - # first channel - output[0, 0] = [10, 4] - target[0, 0] = [10, 0] - # second channel - output[0, 1] = [10, 18] - target[0, 1] = [10, 10] - # third channel - output[0, 2] = [0, 0] - target[0, 2] = [-1, -1] - # fourth channel - output[0, 3] = [40, 40] - target[0, 3] = [30, 30] - # fifth channel - output[0, 4] = [20, 10] - target[0, 4] = [0, 10] - - normalize_factor = np.ones((output.shape[0], output.shape[2])) - - nme = keypoint_nme(output, target, mask, normalize_factor) - assert abs(nme - 11.5355339) < 1e-4 - - -def test_pose_pck_accuracy(): - output = np.zeros((1, 5, 64, 64), dtype=np.float32) - target = np.zeros((1, 5, 64, 64), 
dtype=np.float32) - mask = np.array([[True, True, False, False, False]]) - # first channel - output[0, 0, 20, 20] = 1 - target[0, 0, 10, 10] = 1 - # second channel - output[0, 1, 30, 30] = 1 - target[0, 1, 30, 30] = 1 - - acc, avg_acc, cnt = pose_pck_accuracy(output, target, mask) - - assert_array_almost_equal(acc, np.array([0, 1, -1, -1, -1]), decimal=4) - assert abs(avg_acc - 0.5) < 1e-4 - assert abs(cnt - 2) < 1e-4 - - -def test_multilabel_classification_accuracy(): - output = np.array([[0.7, 0.8, 0.4], [0.8, 0.1, 0.1]]) - target = np.array([[1, 0, 0], [1, 0, 1]]) - mask = np.array([[True, True, True], [True, True, True]]) - thr = 0.5 - acc = multilabel_classification_accuracy(output, target, mask, thr) - assert acc == 0 - - output = np.array([[0.7, 0.2, 0.4], [0.8, 0.1, 0.9]]) - thr = 0.5 - acc = multilabel_classification_accuracy(output, target, mask, thr) - assert acc == 1 - - thr = 0.3 - acc = multilabel_classification_accuracy(output, target, mask, thr) - assert acc == 0.5 - - mask = np.array([[True, True, False], [True, True, True]]) - acc = multilabel_classification_accuracy(output, target, mask, thr) - assert acc == 1 +class TestKeypointEval(TestCase): + + def test_keypoint_pck_accuracy(self): + + output = np.zeros((2, 5, 2)) + target = np.zeros((2, 5, 2)) + mask = np.array([[True, True, False, True, True], + [True, True, False, True, True]]) + + # first channel + output[0, 0] = [10, 0] + target[0, 0] = [10, 0] + # second channel + output[0, 1] = [20, 20] + target[0, 1] = [10, 10] + # third channel + output[0, 2] = [0, 0] + target[0, 2] = [-1, 0] + # fourth channel + output[0, 3] = [30, 30] + target[0, 3] = [30, 30] + # fifth channel + output[0, 4] = [0, 10] + target[0, 4] = [0, 10] + + thr = np.full((2, 2), 10, dtype=np.float32) + + acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, + thr) + + assert_array_almost_equal(acc, np.array([1, 0.5, -1, 1, 1]), decimal=4) + self.assertAlmostEqual(avg_acc, 0.875, delta=1e-4) + 
self.assertAlmostEqual(cnt, 4, delta=1e-4) + + acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, + np.zeros((2, 2))) + assert_array_almost_equal( + acc, np.array([-1, -1, -1, -1, -1]), decimal=4) + self.assertAlmostEqual(avg_acc, 0, delta=1e-4) + self.assertAlmostEqual(cnt, 0, delta=1e-4) + + acc, avg_acc, cnt = keypoint_pck_accuracy(output, target, mask, 0.5, + np.array([[0, 0], [10, 10]])) + assert_array_almost_equal(acc, np.array([1, 1, -1, 1, 1]), decimal=4) + self.assertAlmostEqual(avg_acc, 1, delta=1e-4) + self.assertAlmostEqual(cnt, 4, delta=1e-4) + + def test_keypoint_auc(self): + output = np.zeros((1, 5, 2)) + target = np.zeros((1, 5, 2)) + mask = np.array([[True, True, False, True, True]]) + # first channel + output[0, 0] = [10, 4] + target[0, 0] = [10, 0] + # second channel + output[0, 1] = [10, 18] + target[0, 1] = [10, 10] + # third channel + output[0, 2] = [0, 0] + target[0, 2] = [0, -1] + # fourth channel + output[0, 3] = [40, 40] + target[0, 3] = [30, 30] + # fifth channel + output[0, 4] = [20, 10] + target[0, 4] = [0, 10] + + auc = keypoint_auc(output, target, mask, 20, 4) + self.assertAlmostEqual(auc, 0.375, delta=1e-4) + + def test_keypoint_epe(self): + output = np.zeros((1, 5, 2)) + target = np.zeros((1, 5, 2)) + mask = np.array([[True, True, False, True, True]]) + # first channel + output[0, 0] = [10, 4] + target[0, 0] = [10, 0] + # second channel + output[0, 1] = [10, 18] + target[0, 1] = [10, 10] + # third channel + output[0, 2] = [0, 0] + target[0, 2] = [-1, -1] + # fourth channel + output[0, 3] = [40, 40] + target[0, 3] = [30, 30] + # fifth channel + output[0, 4] = [20, 10] + target[0, 4] = [0, 10] + + epe = keypoint_epe(output, target, mask) + self.assertAlmostEqual(epe, 11.5355339, delta=1e-4) + + def test_keypoint_nme(self): + output = np.zeros((1, 5, 2)) + target = np.zeros((1, 5, 2)) + mask = np.array([[True, True, False, True, True]]) + # first channel + output[0, 0] = [10, 4] + target[0, 0] = [10, 0] + # second 
channel + output[0, 1] = [10, 18] + target[0, 1] = [10, 10] + # third channel + output[0, 2] = [0, 0] + target[0, 2] = [-1, -1] + # fourth channel + output[0, 3] = [40, 40] + target[0, 3] = [30, 30] + # fifth channel + output[0, 4] = [20, 10] + target[0, 4] = [0, 10] + + normalize_factor = np.ones((output.shape[0], output.shape[2])) + + nme = keypoint_nme(output, target, mask, normalize_factor) + self.assertAlmostEqual(nme, 11.5355339, delta=1e-4) + + def test_pose_pck_accuracy(self): + output = np.zeros((1, 5, 64, 64), dtype=np.float32) + target = np.zeros((1, 5, 64, 64), dtype=np.float32) + mask = np.array([[True, True, False, False, False]]) + # first channel + output[0, 0, 20, 20] = 1 + target[0, 0, 10, 10] = 1 + # second channel + output[0, 1, 30, 30] = 1 + target[0, 1, 30, 30] = 1 + + acc, avg_acc, cnt = pose_pck_accuracy(output, target, mask) + + assert_array_almost_equal(acc, np.array([0, 1, -1, -1, -1]), decimal=4) + self.assertAlmostEqual(avg_acc, 0.5, delta=1e-4) + self.assertAlmostEqual(cnt, 2, delta=1e-4) + + def test_multilabel_classification_accuracy(self): + output = np.array([[0.7, 0.8, 0.4], [0.8, 0.1, 0.1]]) + target = np.array([[1, 0, 0], [1, 0, 1]]) + mask = np.array([[True, True, True], [True, True, True]]) + thr = 0.5 + acc = multilabel_classification_accuracy(output, target, mask, thr) + self.assertEqual(acc, 0) + + output = np.array([[0.7, 0.2, 0.4], [0.8, 0.1, 0.9]]) + thr = 0.5 + acc = multilabel_classification_accuracy(output, target, mask, thr) + self.assertEqual(acc, 1) + + thr = 0.3 + acc = multilabel_classification_accuracy(output, target, mask, thr) + self.assertEqual(acc, 0.5) + + mask = np.array([[True, True, False], [True, True, True]]) + acc = multilabel_classification_accuracy(output, target, mask, thr) + self.assertEqual(acc, 1) + + def test_keypoint_mpjpe(self): + output = np.zeros((2, 5, 3)) + target = np.zeros((2, 5, 3)) + mask = np.array([[True, True, False, True, True], + [True, True, False, True, True]]) + + # first 
channel + output[0, 0] = [1, 0, 0] + target[0, 0] = [1, 0, 0] + output[1, 0] = [1, 0, 0] + target[1, 0] = [1, 1, 0] + # second channel + output[0, 1] = [2, 2, 0] + target[0, 1] = [1, 1, 1] + output[1, 1] = [2, 2, 1] + target[1, 1] = [1, 0, 1] + # third channel + output[0, 2] = [0, 0, -1] + target[0, 2] = [-1, 0, 0] + output[1, 2] = [-1, 0, 0] + target[1, 2] = [-1, 0, 0] + # fourth channel + output[0, 3] = [3, 3, 1] + target[0, 3] = [3, 3, 1] + output[1, 3] = [0, 0, 3] + target[1, 3] = [0, 0, 3] + # fifth channel + output[0, 4] = [0, 1, 1] + target[0, 4] = [0, 1, 0] + output[1, 4] = [0, 0, 1] + target[1, 4] = [1, 1, 0] + + mpjpe = keypoint_mpjpe(output, target, mask) + self.assertAlmostEqual(mpjpe, 0.9625211990796929, delta=1e-4) + + p_mpjpe = keypoint_mpjpe(output, target, mask, 'procrustes') + self.assertAlmostEqual(p_mpjpe, 1.0047897634604497, delta=1e-4) + + s_mpjpe = keypoint_mpjpe(output, target, mask, 'scale') + self.assertAlmostEqual(s_mpjpe, 1.0277129678465953, delta=1e-4) + + with self.assertRaises(ValueError): + _ = keypoint_mpjpe(output, target, mask, 'alignment') diff --git a/tests/test_evaluation/test_metrics/test_keypoint_3d_metrics.py b/tests/test_evaluation/test_metrics/test_keypoint_3d_metrics.py new file mode 100644 index 0000000000..8289b09d0f --- /dev/null +++ b/tests/test_evaluation/test_metrics/test_keypoint_3d_metrics.py @@ -0,0 +1,70 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from unittest import TestCase + +import numpy as np +from mmengine.structures import InstanceData + +from mmpose.evaluation import MPJPE +from mmpose.structures import PoseDataSample + + +class TestMPJPE(TestCase): + + def setUp(self): + """Setup variables used in every test method.""" + self.batch_size = 8 + num_keypoints = 15 + self.data_batch = [] + self.data_samples = [] + + for i in range(self.batch_size): + gt_instances = InstanceData() + keypoints = np.random.random((1, num_keypoints, 3)) + gt_instances.lifting_target = np.random.random((num_keypoints, 3)) + gt_instances.lifting_target_visible = np.ones( + (num_keypoints, 1)).astype(bool) + + pred_instances = InstanceData() + pred_instances.keypoints = keypoints + np.random.normal( + 0, 0.01, keypoints.shape) + + data = {'inputs': None} + data_sample = PoseDataSample( + gt_instances=gt_instances, pred_instances=pred_instances) + data_sample.set_metainfo( + dict(target_img_path='tests/data/h36m/S7/' + 'S7_Greeting.55011271/S7_Greeting.55011271_000396.jpg')) + + self.data_batch.append(data) + self.data_samples.append(data_sample.to_dict()) + + def test_init(self): + """Test metric init method.""" + # Test invalid mode + with self.assertRaisesRegex( + KeyError, "`mode` should be 'mpjpe', 'p-mpjpe', or 'n-mpjpe', " + "but got 'invalid'."): + MPJPE(mode='invalid') + + def test_evaluate(self): + """Test MPJPE evaluation metric.""" + mpjpe_metric = MPJPE(mode='mpjpe') + mpjpe_metric.process(self.data_batch, self.data_samples) + mpjpe = mpjpe_metric.evaluate(self.batch_size) + self.assertIsInstance(mpjpe, dict) + self.assertIn('MPJPE', mpjpe) + self.assertTrue(mpjpe['MPJPE'] >= 0) + + p_mpjpe_metric = MPJPE(mode='p-mpjpe') + p_mpjpe_metric.process(self.data_batch, self.data_samples) + p_mpjpe = p_mpjpe_metric.evaluate(self.batch_size) + self.assertIsInstance(p_mpjpe, dict) + self.assertIn('P-MPJPE', p_mpjpe) + self.assertTrue(p_mpjpe['P-MPJPE'] >= 0) + + n_mpjpe_metric = MPJPE(mode='n-mpjpe') + 
n_mpjpe_metric.process(self.data_batch, self.data_samples) + n_mpjpe = n_mpjpe_metric.evaluate(self.batch_size) + self.assertIsInstance(n_mpjpe, dict) + self.assertIn('N-MPJPE', n_mpjpe) + self.assertTrue(n_mpjpe['N-MPJPE'] >= 0) diff --git a/tests/test_visualization/test_fast_visualizer.py b/tests/test_visualization/test_fast_visualizer.py new file mode 100644 index 0000000000..f4a24ca1f9 --- /dev/null +++ b/tests/test_visualization/test_fast_visualizer.py @@ -0,0 +1,71 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from unittest import TestCase + +import numpy as np + +from mmpose.visualization import FastVisualizer + + +class TestFastVisualizer(TestCase): + + def setUp(self): + self.metainfo = { + 'keypoint_id2name': { + 0: 'nose', + 1: 'left_eye', + 2: 'right_eye' + }, + 'keypoint_name2id': { + 'nose': 0, + 'left_eye': 1, + 'right_eye': 2 + }, + 'keypoint_colors': np.array([[255, 0, 0], [0, 255, 0], [0, 0, + 255]]), + 'skeleton_links': [(0, 1), (1, 2)], + 'skeleton_link_colors': np.array([[255, 255, 0], [255, 0, 255]]) + } + self.visualizer = FastVisualizer(self.metainfo) + + def test_init(self): + self.assertEqual(self.visualizer.radius, 6) + self.assertEqual(self.visualizer.line_width, 3) + self.assertEqual(self.visualizer.kpt_thr, 0.3) + self.assertEqual(self.visualizer.keypoint_id2name, + self.metainfo['keypoint_id2name']) + self.assertEqual(self.visualizer.keypoint_name2id, + self.metainfo['keypoint_name2id']) + np.testing.assert_array_equal(self.visualizer.keypoint_colors, + self.metainfo['keypoint_colors']) + self.assertEqual(self.visualizer.skeleton_links, + self.metainfo['skeleton_links']) + np.testing.assert_array_equal(self.visualizer.skeleton_link_colors, + self.metainfo['skeleton_link_colors']) + + def test_draw_pose(self): + img = np.zeros((480, 640, 3), dtype=np.uint8) + instances = type('Instances', (object, ), {})() + instances.keypoints = np.array([[[100, 100], [200, 200], [300, 300]]], + dtype=np.float32) + instances.keypoint_scores = 
np.array([[0.5, 0.5, 0.5]], + dtype=np.float32) + + self.visualizer.draw_pose(img, instances) + + # Check if keypoints are drawn + self.assertNotEqual(img[100, 100].tolist(), [0, 0, 0]) + self.assertNotEqual(img[200, 200].tolist(), [0, 0, 0]) + self.assertNotEqual(img[300, 300].tolist(), [0, 0, 0]) + + # Check if skeleton links are drawn + self.assertNotEqual(img[150, 150].tolist(), [0, 0, 0]) + self.assertNotEqual(img[250, 250].tolist(), [0, 0, 0]) + + def test_draw_pose_with_none_instances(self): + img = np.zeros((480, 640, 3), dtype=np.uint8) + instances = None + + self.visualizer.draw_pose(img, instances) + + # Check if the image is still empty (black) + self.assertEqual(np.count_nonzero(img), 0) From cda20ca9c51e1045b69918c10602f3537566f4cb Mon Sep 17 00:00:00 2001 From: Tau Date: Fri, 2 Jun 2023 14:48:03 +0800 Subject: [PATCH 13/52] [Docs] add PyTorch 2.0 Docs and update refactory progress (#2425) --- README.md | 8 ++++---- README_CN.md | 8 ++++---- docs/en/notes/pytorch_2.md | 13 ++++++++++++- docs/zh_cn/notes/pytorch_2.md | 13 ++++++++++++- 4 files changed, 32 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index c40b9cdc4c..749b75e307 100644 --- a/README.md +++ b/README.md @@ -139,18 +139,18 @@ MMPose v1.0.0 is a major update, including many API and config file changes. 
Cur | HigherHRNet (CVPR 2020) | | | DeepPose (CVPR 2014) | done | | RLE (ICCV 2021) | done | -| SoftWingloss (TIP 2021) | | -| VideoPose3D (CVPR 2019) | in progress | +| SoftWingloss (TIP 2021) | done | +| VideoPose3D (CVPR 2019) | done | | Hourglass (ECCV 2016) | done | | LiteHRNet (CVPR 2021) | done | | AdaptiveWingloss (ICCV 2019) | done | | SimpleBaseline2D (ECCV 2018) | done | | PoseWarper (NeurIPS 2019) | | -| SimpleBaseline3D (ICCV 2017) | in progress | +| SimpleBaseline3D (ICCV 2017) | done | | HMR (CVPR 2018) | | | UDP (CVPR 2020) | done | | VIPNAS (CVPR 2021) | done | -| Wingloss (CVPR 2018) | | +| Wingloss (CVPR 2018) | done | | DarkPose (CVPR 2020) | done | | Associative Embedding (NIPS 2017) | in progress | | VoxelPose (ECCV 2020) | | diff --git a/README_CN.md b/README_CN.md index 519e9889da..5e48568001 100644 --- a/README_CN.md +++ b/README_CN.md @@ -137,18 +137,18 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的 | HigherHRNet (CVPR 2020) | | | DeepPose (CVPR 2014) | done | | RLE (ICCV 2021) | done | -| SoftWingloss (TIP 2021) | | -| VideoPose3D (CVPR 2019) | in progress | +| SoftWingloss (TIP 2021) | done | +| VideoPose3D (CVPR 2019) | done | | Hourglass (ECCV 2016) | done | | LiteHRNet (CVPR 2021) | done | | AdaptiveWingloss (ICCV 2019) | done | | SimpleBaseline2D (ECCV 2018) | done | | PoseWarper (NeurIPS 2019) | | -| SimpleBaseline3D (ICCV 2017) | in progress | +| SimpleBaseline3D (ICCV 2017) | done | | HMR (CVPR 2018) | | | UDP (CVPR 2020) | done | | VIPNAS (CVPR 2021) | done | -| Wingloss (CVPR 2018) | | +| Wingloss (CVPR 2018) | done | | DarkPose (CVPR 2020) | done | | Associative Embedding (NIPS 2017) | in progress | | VoxelPose (ECCV 2020) | | diff --git a/docs/en/notes/pytorch_2.md b/docs/en/notes/pytorch_2.md index cd1d73f3fc..4892e554a5 100644 --- a/docs/en/notes/pytorch_2.md +++ b/docs/en/notes/pytorch_2.md @@ -1,3 +1,14 @@ # PyTorch 2.0 Compatibility and Benchmarks -Coming soon. 
+MMPose 1.0.0 is now compatible with PyTorch 2.0, ensuring that users can leverage the latest features and performance improvements offered by the PyTorch 2.0 framework when using MMPose. With the integration of inductor, users can expect faster model speeds. The table below shows several example models: + +| Model | Training Speed | Memory | +| :-------- | :---------------------: | :-----------: | +| ViTPose-B | 29.6% ↑ (0.931 → 0.655) | 10586 → 10663 | +| ViTPose-S | 33.7% ↑ (0.563 → 0.373) | 6091 → 6170 | +| HRNet-w32 | 12.8% ↑ (0.553 → 0.482) | 9849 → 10145 | +| HRNet-w48 | 37.1% ↑ (0.437 → 0.275) | 7319 → 7394 | +| RTMPose-t | 6.3% ↑ (1.533 → 1.437) | 6292 → 6489 | +| RTMPose-s | 13.1% ↑ (1.645 → 1.430) | 9013 → 9208 | + +- Pytorch 2.0 test, add projects doc and refactor by @LareinaM in [PR#2136](https://github.com/open-mmlab/mmpose/pull/2136) diff --git a/docs/zh_cn/notes/pytorch_2.md b/docs/zh_cn/notes/pytorch_2.md index cd1d73f3fc..4892e554a5 100644 --- a/docs/zh_cn/notes/pytorch_2.md +++ b/docs/zh_cn/notes/pytorch_2.md @@ -1,3 +1,14 @@ # PyTorch 2.0 Compatibility and Benchmarks -Coming soon. +MMPose 1.0.0 is now compatible with PyTorch 2.0, ensuring that users can leverage the latest features and performance improvements offered by the PyTorch 2.0 framework when using MMPose. With the integration of inductor, users can expect faster model speeds. 
The table below shows several example models: + +| Model | Training Speed | Memory | +| :-------- | :---------------------: | :-----------: | +| ViTPose-B | 29.6% ↑ (0.931 → 0.655) | 10586 → 10663 | +| ViTPose-S | 33.7% ↑ (0.563 → 0.373) | 6091 → 6170 | +| HRNet-w32 | 12.8% ↑ (0.553 → 0.482) | 9849 → 10145 | +| HRNet-w48 | 37.1% ↑ (0.437 → 0.275) | 7319 → 7394 | +| RTMPose-t | 6.3% ↑ (1.533 → 1.437) | 6292 → 6489 | +| RTMPose-s | 13.1% ↑ (1.645 → 1.430) | 9013 → 9208 | + +- Pytorch 2.0 test, add projects doc and refactor by @LareinaM in [PR#2136](https://github.com/open-mmlab/mmpose/pull/2136) From 79f7cf7d4801653b27fbe16e0a8363a5a31db7d2 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Tue, 6 Jun 2023 20:34:43 +0800 Subject: [PATCH 14/52] [Fix] fix `merge_args` in tools/test.py (#2431) --- .../topdown_heatmap/coco/vitpose_coco.md | 2 +- tools/test.py | 34 +++++++++++-------- tools/train.py | 2 +- 3 files changed, 21 insertions(+), 17 deletions(-) diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md index f9266001d5..67e2a9cb3b 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md @@ -1,7 +1,7 @@ To utilize ViTPose, you'll need to have [MMClassification](https://github.com/open-mmlab/mmclassification). 
To install the required version, run the following command: ```shell -mim install 'mmcls>=1.0.0rc5' +mim install 'mmcls>=1.0.0rc6' ``` diff --git a/tools/test.py b/tools/test.py index 3a22ae78c5..5dc0110260 100644 --- a/tools/test.py +++ b/tools/test.py @@ -60,6 +60,20 @@ def parse_args(): def merge_args(cfg, args): """Merge CLI arguments to config.""" + + cfg.launcher = args.launcher + cfg.load_from = args.checkpoint + + # -------------------- work directory -------------------- + # work_dir is determined in this priority: CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + # -------------------- visualization -------------------- if args.show or (args.show_dir is not None): assert 'visualization' in cfg.default_hooks, \ @@ -80,10 +94,14 @@ def merge_args(cfg, args): 'The dump file must be a pkl file.' 
dump_metric = dict(type='DumpResults', out_file_path=args.dump) if isinstance(cfg.test_evaluator, (list, tuple)): - cfg.test_evaluator = list(cfg.test_evaluator).append(dump_metric) + cfg.test_evaluator = [*cfg.test_evaluator, dump_metric] else: cfg.test_evaluator = [cfg.test_evaluator, dump_metric] + # -------------------- Other arguments -------------------- + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + return cfg @@ -93,20 +111,6 @@ def main(): # load config cfg = Config.fromfile(args.config) cfg = merge_args(cfg, args) - cfg.launcher = args.launcher - if args.cfg_options is not None: - cfg.merge_from_dict(args.cfg_options) - - # work_dir is determined in this priority: CLI > segment in file > filename - if args.work_dir is not None: - # update configs according to CLI args if args.work_dir is not None - cfg.work_dir = args.work_dir - elif cfg.get('work_dir', None) is None: - # use config filename as default work_dir if cfg.work_dir is None - cfg.work_dir = osp.join('./work_dirs', - osp.splitext(osp.basename(args.config))[0]) - - cfg.load_from = args.checkpoint # build the runner from config runner = Runner.from_cfg(cfg) diff --git a/tools/train.py b/tools/train.py index e086d95d73..e1930c20c7 100644 --- a/tools/train.py +++ b/tools/train.py @@ -115,7 +115,7 @@ def merge_args(cfg, args): if args.auto_scale_lr: cfg.auto_scale_lr.enable = True - # visualization- + # visualization if args.show or (args.show_dir is not None): assert 'visualization' in cfg.default_hooks, \ 'PoseVisualizationHook is not set in the ' \ From 94e15226a29a7067d9bb0cb7937b86e3c3fd0c8e Mon Sep 17 00:00:00 2001 From: Tau Date: Wed, 7 Jun 2023 00:11:45 +0800 Subject: [PATCH 15/52] [Feature] Add RTMPose-Halpe26 models (#2430) --- configs/_base_/datasets/halpe26.py | 274 +++++++++ ...=> rtmpose-l_8xb256-420e_body8-256x192.py} | 4 +- ...=> rtmpose-l_8xb256-420e_body8-384x288.py} | 4 +- ...ose-l_8xb512-700e_body8-halpe26-256x192.py | 535 +++++++++++++++++ 
...ose-l_8xb512-700e_body8-halpe26-384x288.py | 535 +++++++++++++++++ ...=> rtmpose-m_8xb256-420e_body8-256x192.py} | 4 +- ...=> rtmpose-m_8xb256-420e_body8-384x288.py} | 4 +- ...ose-m_8xb512-700e_body8-halpe26-256x192.py | 529 +++++++++++++++++ ...ose-m_8xb512-700e_body8-halpe26-384x288.py | 542 ++++++++++++++++++ ...se-s_8xb1024-700e_body8-halpe26-256x192.py | 535 +++++++++++++++++ ...=> rtmpose-s_8xb256-420e_body8-256x192.py} | 4 +- ...se-t_8xb1024-700e_body8-halpe26-256x192.py | 536 +++++++++++++++++ ...=> rtmpose-t_8xb256-420e_body8-256x192.py} | 4 +- ...ose-x_8xb256-700e_body8-halpe26-384x288.py | 535 +++++++++++++++++ ...rtmpose_body8.md => rtmpose_body8-coco.md} | 12 +- ...mpose_body8.yml => rtmpose_body8-coco.yml} | 24 +- .../rtmpose/body8/rtmpose_body8-halpe26.md | 74 +++ .../rtmpose/body8/rtmpose_body8-halpe26.yml | 106 ++++ projects/rtmpose/README.md | 25 +- projects/rtmpose/README_CN.md | 25 +- .../rtmpose-m_8xb64-210e_ap10k-256x256.py | 23 +- .../rtmpose-l_8xb256-420e_coco-256x192.py | 16 +- .../rtmpose-l_8xb256-420e_coco-384x288.py | 16 +- ...ose-l_8xb512-700e_body8-halpe26-256x192.py | 535 +++++++++++++++++ ...ose-l_8xb512-700e_body8-halpe26-384x288.py | 535 +++++++++++++++++ .../rtmpose-m_8xb256-420e_coco-256x192.py | 22 +- .../rtmpose-m_8xb256-420e_coco-384x288.py | 22 +- ...ose-m_8xb512-700e_body8-halpe26-256x192.py | 529 +++++++++++++++++ ...ose-m_8xb512-700e_body8-halpe26-384x288.py | 542 ++++++++++++++++++ ...se-s_8xb1024-700e_body8-halpe26-256x192.py | 535 +++++++++++++++++ .../rtmpose-s_8xb256-420e_coco-256x192.py | 16 +- ...se-t_8xb1024-700e_body8-halpe26-256x192.py | 536 +++++++++++++++++ .../rtmpose-t_8xb256-420e_coco-256x192.py | 16 +- ...ose-x_8xb256-700e_body8-halpe26-384x288.py | 535 +++++++++++++++++ .../rtmpose-m_8xb256-120e_lapa-256x256.py | 16 +- .../rtmpose-s_8xb256-120e_lapa-256x256.py | 16 +- .../rtmpose-t_8xb256-120e_lapa-256x256.py | 16 +- ..._8xb32-210e_coco-wholebody-hand-256x256.py | 21 +- 
...ose-l_8xb32-270e_coco-wholebody-384x288.py | 21 +- ...ose-l_8xb64-270e_coco-wholebody-256x192.py | 21 +- ...ose-m_8xb64-270e_coco-wholebody-256x192.py | 21 +- 41 files changed, 8179 insertions(+), 142 deletions(-) create mode 100644 configs/_base_/datasets/halpe26.py rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-l_8xb256-210e_body8-256x192.py => rtmpose-l_8xb256-420e_body8-256x192.py} (99%) rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-l_8xb256-210e_body8-384x288.py => rtmpose-l_8xb256-420e_body8-384x288.py} (99%) create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-m_8xb256-210e_body8-256x192.py => rtmpose-m_8xb256-420e_body8-256x192.py} (99%) rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-m_8xb256-210e_body8-384x288.py => rtmpose-m_8xb256-420e_body8-384x288.py} (99%) create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-s_8xb256-210e_body8-256x192.py => rtmpose-s_8xb256-420e_body8-256x192.py} (99%) create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose-t_8xb256-210e_body8-256x192.py => rtmpose-t_8xb256-420e_body8-256x192.py} (99%) create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose_body8.md => rtmpose_body8-coco.md} (93%) rename configs/body_2d_keypoint/rtmpose/body8/{rtmpose_body8.yml => 
rtmpose_body8-coco.yml} (87%) create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.md create mode 100644 configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.yml create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py diff --git a/configs/_base_/datasets/halpe26.py b/configs/_base_/datasets/halpe26.py new file mode 100644 index 0000000000..cb4df83874 --- /dev/null +++ b/configs/_base_/datasets/halpe26.py @@ -0,0 +1,274 @@ +dataset_info = dict( + dataset_name='halpe26', + paper_info=dict( + author='Li, Yong-Lu and Xu, Liang and Liu, Xinpeng and Huang, Xijie' + ' and Xu, Yue and Wang, Shiyi and Fang, Hao-Shu' + ' and Ma, Ze and Chen, Mingyang and Lu, Cewu', + title='PaStaNet: Toward Human Activity Knowledge Engine', + container='CVPR', + year='2020', + homepage='https://github.com/Fang-Haoshu/Halpe-FullBody/', + ), + keypoint_info={ + 0: + dict(name='nose', id=0, color=[51, 153, 255], type='upper', swap=''), + 1: + dict( + name='left_eye', + id=1, + color=[51, 153, 255], + type='upper', + swap='right_eye'), + 2: + dict( + name='right_eye', + id=2, + color=[51, 153, 255], + type='upper', + swap='left_eye'), + 3: + dict( + name='left_ear', + id=3, + color=[51, 153, 255], + type='upper', + swap='right_ear'), + 4: + dict( + name='right_ear', + 
id=4, + color=[51, 153, 255], + type='upper', + swap='left_ear'), + 5: + dict( + name='left_shoulder', + id=5, + color=[0, 255, 0], + type='upper', + swap='right_shoulder'), + 6: + dict( + name='right_shoulder', + id=6, + color=[255, 128, 0], + type='upper', + swap='left_shoulder'), + 7: + dict( + name='left_elbow', + id=7, + color=[0, 255, 0], + type='upper', + swap='right_elbow'), + 8: + dict( + name='right_elbow', + id=8, + color=[255, 128, 0], + type='upper', + swap='left_elbow'), + 9: + dict( + name='left_wrist', + id=9, + color=[0, 255, 0], + type='upper', + swap='right_wrist'), + 10: + dict( + name='right_wrist', + id=10, + color=[255, 128, 0], + type='upper', + swap='left_wrist'), + 11: + dict( + name='left_hip', + id=11, + color=[0, 255, 0], + type='lower', + swap='right_hip'), + 12: + dict( + name='right_hip', + id=12, + color=[255, 128, 0], + type='lower', + swap='left_hip'), + 13: + dict( + name='left_knee', + id=13, + color=[0, 255, 0], + type='lower', + swap='right_knee'), + 14: + dict( + name='right_knee', + id=14, + color=[255, 128, 0], + type='lower', + swap='left_knee'), + 15: + dict( + name='left_ankle', + id=15, + color=[0, 255, 0], + type='lower', + swap='right_ankle'), + 16: + dict( + name='right_ankle', + id=16, + color=[255, 128, 0], + type='lower', + swap='left_ankle'), + 17: + dict(name='head', id=17, color=[255, 128, 0], type='upper', swap=''), + 18: + dict(name='neck', id=18, color=[255, 128, 0], type='upper', swap=''), + 19: + dict(name='hip', id=19, color=[255, 128, 0], type='lower', swap=''), + 20: + dict( + name='left_big_toe', + id=20, + color=[255, 128, 0], + type='lower', + swap='right_big_toe'), + 21: + dict( + name='right_big_toe', + id=21, + color=[255, 128, 0], + type='lower', + swap='left_big_toe'), + 22: + dict( + name='left_small_toe', + id=22, + color=[255, 128, 0], + type='lower', + swap='right_small_toe'), + 23: + dict( + name='right_small_toe', + id=23, + color=[255, 128, 0], + type='lower', + swap='left_small_toe'), + 
24: + dict( + name='left_heel', + id=24, + color=[255, 128, 0], + type='lower', + swap='right_heel'), + 25: + dict( + name='right_heel', + id=25, + color=[255, 128, 0], + type='lower', + swap='left_heel') + }, + skeleton_info={ + 0: + dict(link=('left_ankle', 'left_knee'), id=0, color=[0, 255, 0]), + 1: + dict(link=('left_knee', 'left_hip'), id=1, color=[0, 255, 0]), + 2: + dict(link=('left_hip', 'hip'), id=2, color=[0, 255, 0]), + 3: + dict(link=('right_ankle', 'right_knee'), id=3, color=[255, 128, 0]), + 4: + dict(link=('right_knee', 'right_hip'), id=4, color=[255, 128, 0]), + 5: + dict(link=('right_hip', 'hip'), id=5, color=[255, 128, 0]), + 6: + dict(link=('head', 'neck'), id=6, color=[51, 153, 255]), + 7: + dict(link=('neck', 'hip'), id=7, color=[51, 153, 255]), + 8: + dict(link=('neck', 'left_shoulder'), id=8, color=[0, 255, 0]), + 9: + dict(link=('left_shoulder', 'left_elbow'), id=9, color=[0, 255, 0]), + 10: + dict(link=('left_elbow', 'left_wrist'), id=10, color=[0, 255, 0]), + 11: + dict(link=('neck', 'right_shoulder'), id=11, color=[255, 128, 0]), + 12: + dict( + link=('right_shoulder', 'right_elbow'), id=12, color=[255, 128, + 0]), + 13: + dict(link=('right_elbow', 'right_wrist'), id=13, color=[255, 128, 0]), + 14: + dict(link=('left_eye', 'right_eye'), id=14, color=[51, 153, 255]), + 15: + dict(link=('nose', 'left_eye'), id=15, color=[51, 153, 255]), + 16: + dict(link=('nose', 'right_eye'), id=16, color=[51, 153, 255]), + 17: + dict(link=('left_eye', 'left_ear'), id=17, color=[51, 153, 255]), + 18: + dict(link=('right_eye', 'right_ear'), id=18, color=[51, 153, 255]), + 19: + dict(link=('left_ear', 'left_shoulder'), id=19, color=[51, 153, 255]), + 20: + dict( + link=('right_ear', 'right_shoulder'), id=20, color=[51, 153, 255]), + 21: + dict(link=('left_ankle', 'left_big_toe'), id=21, color=[0, 255, 0]), + 22: + dict(link=('left_ankle', 'left_small_toe'), id=22, color=[0, 255, 0]), + 23: + dict(link=('left_ankle', 'left_heel'), id=23, color=[0, 255, 0]), 
+ 24: + dict( + link=('right_ankle', 'right_big_toe'), id=24, color=[255, 128, 0]), + 25: + dict( + link=('right_ankle', 'right_small_toe'), + id=25, + color=[255, 128, 0]), + 26: + dict(link=('right_ankle', 'right_heel'), id=26, color=[255, 128, 0]), + }, + # the joint_weights is modified by MMPose Team + joint_weights=[ + 1., 1., 1., 1., 1., 1., 1., 1.2, 1.2, 1.5, 1.5, 1., 1., 1.2, 1.2, 1.5, + 1.5 + ] + [1., 1., 1.2] + [1.5] * 6, + + # 'https://github.com/Fang-Haoshu/Halpe-FullBody/blob/master/' + # 'HalpeCOCOAPI/PythonAPI/halpecocotools/cocoeval.py#L245' + sigmas=[ + 0.026, + 0.025, + 0.025, + 0.035, + 0.035, + 0.079, + 0.079, + 0.072, + 0.072, + 0.062, + 0.062, + 0.107, + 0.107, + 0.087, + 0.087, + 0.089, + 0.089, + 0.026, + 0.026, + 0.066, + 0.079, + 0.079, + 0.079, + 0.079, + 0.079, + 0.079, + ]) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-256x192.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-256x192.py index 3ccd98b514..1cf3380435 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-256x192.py @@ -1,8 +1,8 @@ _base_ = ['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-384x288.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-384x288.py index 
3a9fa00d5c..19b3c8afb6 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-384x288.py @@ -1,8 +1,8 @@ _base_ = ['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..293a5f07ea --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py @@ -0,0 +1,535 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + 
type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1., + widen_factor=1., + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1024, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + 
type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + 
(5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + 
dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=5, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + 
data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=5, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = 
val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..0aa16f3db4 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py @@ -0,0 +1,535 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + 
normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1., + widen_factor=1., + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1024, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + 
min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + 
+posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + 
data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + 
mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + 
val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-256x192.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-256x192.py index 28424e4247..be462bfddf 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-256x192.py @@ -1,8 +1,8 @@ _base_ = ['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-384x288.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-384x288.py index 39da665365..64cfc8a604 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-384x288.py @@ -1,8 +1,8 @@ _base_ = 
['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..e694dd27d9 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py @@ -0,0 +1,529 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + 
arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=768, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + 
dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + 
data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + 
data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + 
type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + 
type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..5ee967a309 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py @@ -0,0 +1,542 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + 
_scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=768, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +# backend_args = dict(backend='local') +backend_args = dict( + backend='petrel', + path_mapping=dict({ + f'{data_root}': 's3://openmmlab/datasets/', + f'{data_root}': 's3://openmmlab/datasets/' + })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + 
use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 
9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + 
num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + 
data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# 
hooks +# default_hooks = dict( +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..05e6ec0980 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py @@ -0,0 +1,535 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 1024 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.0), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + 
simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=512, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + 
max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.6, 1.4], + rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, 
i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', 
+ data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, 
+ mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + 
val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-420e_body8-256x192.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-420e_body8-256x192.py index 16d86c5fa6..7d0a697751 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-420e_body8-256x192.py @@ -1,8 +1,8 @@ _base_ = ['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..8d70bd27ae --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py @@ -0,0 +1,536 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 1024 +val_batch_size = 64 + 
+train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.167, + widen_factor=0.375, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=384, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + 
loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.6, 1.4], + rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + 
dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + 
data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + 
data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 
'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + # dict( + # type='EMAHook', + # ema_type='ExpMomentumEMA', + # momentum=0.0002, + # update_buffers=True, + # priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-420e_body8-256x192.py similarity index 99% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-420e_body8-256x192.py index 8ec873e246..bdc7f80a2b 100644 --- 
a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-420e_body8-256x192.py @@ -1,8 +1,8 @@ _base_ = ['../../../_base_/default_runtime.py'] # runtime -max_epochs = 210 -stage2_num_epochs = 30 +max_epochs = 420 +stage2_num_epochs = 20 base_lr = 4e-3 train_cfg = dict(max_epochs=max_epochs, val_interval=10) diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..e50aa42f0e --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py @@ -0,0 +1,535 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 20 +base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + 
data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1.33, + widen_factor=1.25, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1280, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + 
use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 
9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + 
num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + 
data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# 
hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.md b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.md similarity index 93% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.md rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.md index a294be844e..5355a7f35b 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.md +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.md @@ -68,9 +68,9 @@ | Config | Input Size | AP
(COCO) | PCK@0.1
(Body8) | AUC
(Body8) | EPE
(Body8) | Params(M) | FLOPS(G) | Download | | :--------------------------------------------: | :--------: | :---------------: | :---------------------: | :-----------------: | :-----------------: | :-------: | :------: | :-----------------------------------------------: | -| [RTMPose-t\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 19.45 | 3.34 | 0.36 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | -| [RTMPose-s\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 17.85 | 5.47 | 0.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | -| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 15.12 | 13.59 | 1.93 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | -| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 13.79 | 27.66 | 4.16 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | -| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.98 | 13.72 | 4.33 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | -| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 13.08 | 27.79 | 9.35 | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | +| [RTMPose-t\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-420e_body8-256x192.py) | 256x192 | 65.9 | 91.44 | 63.18 | 19.45 | 3.34 | 0.36 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth) | +| [RTMPose-s\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-420e_body8-256x192.py) | 256x192 | 69.7 | 92.45 | 65.15 | 17.85 | 5.47 | 0.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth) | +| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-256x192.py) | 256x192 | 74.9 | 94.25 | 68.59 | 15.12 | 13.59 | 1.93 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth) | +| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 13.79 | 27.66 | 4.16 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | +| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.98 | 13.72 | 4.33 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | +| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 13.08 | 27.79 | 9.35 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.yml 
b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.yml similarity index 87% rename from configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.yml rename to configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.yml index c0f5a90863..9299eccb77 100644 --- a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8.yml +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-coco.yml @@ -5,7 +5,7 @@ Collections: URL: https://arxiv.org/abs/2303.07399 README: https://github.com/open-mmlab/mmpose/blob/main/projects/rtmpose/README.md Models: -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-210e_body8-256x192.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb256-420e_body8-256x192.py In Collection: RTMPose Metadata: Architecture: &id001 @@ -18,7 +18,7 @@ Models: - sub-JHMDB - Halpe - PoseTrack18 - Name: rtmpose-t_8xb256-210e_body8-256x192 + Name: rtmpose-t_8xb256-420e_body8-256x192 Results: - Dataset: Body8 Metrics: @@ -26,12 +26,12 @@ Models: Mean@0.1: 0.914 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7_420e-256x192-026a1439_20230504.pth -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-210e_body8-256x192.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb256-420e_body8-256x192.py In Collection: RTMPose Metadata: Architecture: *id001 Training Data: *id002 - Name: rtmpose-s_8xb256-210e_body8-256x192 + Name: rtmpose-s_8xb256-420e_body8-256x192 Results: - Dataset: Body8 Metrics: @@ -39,12 +39,12 @@ Models: Mean@0.1: 0.925 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-256x192.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-256x192.py In Collection: RTMPose Metadata: Architecture: *id001 Training Data: *id002 
- Name: rtmpose-m_8xb256-210e_body8-256x192 + Name: rtmpose-m_8xb256-420e_body8-256x192 Results: - Dataset: Body8 Metrics: @@ -52,12 +52,12 @@ Models: Mean@0.1: 0.943 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-256x192.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-256x192.py In Collection: RTMPose Metadata: Architecture: *id001 Training Data: *id002 - Name: rtmpose-l_8xb256-210e_body8-256x192 + Name: rtmpose-l_8xb256-420e_body8-256x192 Results: - Dataset: Body8 Metrics: @@ -65,12 +65,12 @@ Models: Mean@0.1: 0.951 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-210e_body8-384x288.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb256-420e_body8-384x288.py In Collection: RTMPose Metadata: Architecture: *id001 Training Data: *id002 - Name: rtmpose-m_8xb256-210e_body8-384x288 + Name: rtmpose-m_8xb256-420e_body8-384x288 Results: - Dataset: Body8 Metrics: @@ -78,12 +78,12 @@ Models: Mean@0.1: 0.946 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth -- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-210e_body8-384x288.py +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb256-420e_body8-384x288.py In Collection: RTMPose Metadata: Architecture: *id001 Training Data: *id002 - Name: rtmpose-l_8xb256-210e_body8-384x288 + Name: rtmpose-l_8xb256-420e_body8-384x288 Results: - Dataset: Body8 Metrics: diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.md 
b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.md new file mode 100644 index 0000000000..153b71c663 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.md @@ -0,0 +1,74 @@ + + +
+RTMPose (arXiv'2023) + +```bibtex +@misc{https://doi.org/10.48550/arxiv.2303.07399, + doi = {10.48550/ARXIV.2303.07399}, + url = {https://arxiv.org/abs/2303.07399}, + author = {Jiang, Tao and Lu, Peng and Zhang, Li and Ma, Ningsheng and Han, Rui and Lyu, Chengqi and Li, Yining and Chen, Kai}, + keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, + title = {RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose}, + publisher = {arXiv}, + year = {2023}, + copyright = {Creative Commons Attribution 4.0 International} +} + +``` + +
+ + + +
+RTMDet (arXiv'2022) + +```bibtex +@misc{lyu2022rtmdet, + title={RTMDet: An Empirical Study of Designing Real-Time Object Detectors}, + author={Chengqi Lyu and Wenwei Zhang and Haian Huang and Yue Zhou and Yudong Wang and Yanyi Liu and Shilong Zhang and Kai Chen}, + year={2022}, + eprint={2212.07784}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +
+ + + +
+AlphaPose (TPAMI'2022) + +```bibtex +@article{alphapose, + author = {Fang, Hao-Shu and Li, Jiefeng and Tang, Hongyang and Xu, Chao and Zhu, Haoyi and Xiu, Yuliang and Li, Yong-Lu and Lu, Cewu}, + journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, + title = {AlphaPose: Whole-Body Regional Multi-Person Pose Estimation and Tracking in Real-Time}, + year = {2022} +} +``` + +
+ +- `*` denotes model trained on 7 public datasets: + - [AI Challenger](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#aic) + - [MS COCO](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#coco) + - [CrowdPose](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#crowdpose) + - [MPII](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#mpii) + - [sub-JHMDB](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#sub-jhmdb-dataset) + - [Halpe](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_wholebody_keypoint.html#halpe) + - [PoseTrack18](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#posetrack18) +- `Body8` denotes the addition of the [OCHuman](https://mmpose.readthedocs.io/en/latest/dataset_zoo/2d_body_keypoint.html#ochuman) dataset, in addition to the 7 datasets mentioned above, for evaluation. + +| Config | Input Size | PCK@0.1
(Body8) | AUC
(Body8) | Params(M) | FLOPS(G) | Download | +| :--------------------------------------------------------------: | :--------: | :---------------------: | :-----------------: | :-------: | :------: | :-----------------------------------------------------------------: | +| [RTMPose-t\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 91.89 | 66.35 | 3.51 | 0.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7-halpe26_700e-256x192-6020f8a6_20230605.pth) | +| [RTMPose-s\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 93.01 | 68.62 | 5.70 | 0.70 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.pth) | +| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 94.75 | 71.91 | 13.93 | 1.95 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.pth) | +| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 95.37 | 73.19 | 28.11 | 4.19 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-256x192-2abb7558_20230605.pth) | +| [RTMPose-m\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.15 | 73.56 | 14.06 | 4.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-384x288-89e6428b_20230605.pth) | +| [RTMPose-l\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.56 | 74.38 | 28.24 | 9.40 | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-384x288-734182ce_20230605.pth) | +| [RTMPose-x\*](/configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py) | 384x288 | 95.74 | 74.82 | 50.00 | 17.29 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7-halpe26_700e-384x288-7fb6e239_20230606.pth) | diff --git a/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.yml b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.yml new file mode 100644 index 0000000000..ceef6f9998 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/body8/rtmpose_body8-halpe26.yml @@ -0,0 +1,106 @@ +Collections: +- Name: RTMPose + Paper: + Title: "RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose" + URL: https://arxiv.org/abs/2303.07399 + README: https://github.com/open-mmlab/mmpose/blob/main/projects/rtmpose/README.md +Models: +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py + In Collection: RTMPose + Metadata: + Architecture: &id001 + - RTMPose + Training Data: &id002 + - AI Challenger + - COCO + - CrowdPose + - MPII + - sub-JHMDB + - Halpe + - PoseTrack18 + Name: rtmpose-t_8xb1024-700e_body8-halpe26-256x192 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.919 + AUC: 0.664 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7-halpe26_700e-256x192-6020f8a6_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-s_8xb1024-700e_body8-halpe26-256x192 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.930 + AUC: 0.682 + Task: Body 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-m_8xb512-700e_body8-halpe26-256x192 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.947 + AUC: 0.719 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-l_8xb512-700e_body8-halpe26-256x192 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.954 + AUC: 0.732 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-256x192-2abb7558_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-m_8xb512-700e_body8-halpe26-384x288 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.952 + AUC: 0.736 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-384x288-89e6428b_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-l_8xb512-700e_body8-halpe26-384x288 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.956 + AUC: 0.744 + Task: Body 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-384x288-734182ce_20230605.pth +- Config: configs/body_2d_keypoint/rtmpose/body8/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-x_8xb256-700e_body8-halpe26-384x288 + Results: + - Dataset: Body8 + Metrics: + Mean@0.1: 0.957 + AUC: 0.748 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7-halpe26_700e-384x288-7fb6e239_20230606.pth diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index b070f24d1e..d5d64cd999 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -50,6 +50,8 @@ ______________________________________________________________________ ## 🥳 🚀 What's New [🔝](#-table-of-contents) +- Jun. 2023: + - Release 26-keypoint Body models trained on combined datasets. - May. 2023: - Add [code examples](./examples/) of RTMPose. - Release Hand, Face, Body models trained on combined datasets. @@ -158,7 +160,9 @@ Feel free to join our community group for more help: - Inference speed measured on more hardware platforms can refer to [Benchmark](./benchmark/README.md) - If you have datasets you would like us to support, feel free to [contact us](https://docs.google.com/forms/d/e/1FAIpQLSfzwWr3eNlDzhU98qzk2Eph44Zio6hi5r0iSwfO9wSARkHdWg/viewform?usp=sf_link)/[联系我们](https://uua478.fanqier.cn/f/xxmynrki). -### Body 2d (17 Keypoints) +### Body 2d + +#### 17 Keypoints
AIC+COCO @@ -198,6 +202,21 @@ Feel free to join our community group for more help:
+#### 26 Keypoints + +- Keypoints are defined as [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/). For details please refer to the [meta info](/configs/_base_/datasets/halpe26.py). +- Models are trained and evaluated on `Body8`. + +| Config | Input Size | PCK@0.1
(Body8) | AUC
(Body8) | Params(M) | FLOPS(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :---------------------------------------------------------------------------------------: | :--------: | :---------------------: | :-----------------: | :-------: | :------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 91.89 | 66.35 | 3.51 | 0.37 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7-halpe26_700e-256x192-6020f8a6_20230605.pth) | +| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 93.01 | 68.62 | 5.70 | 0.70 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 94.75 | 71.91 | 13.93 | 1.95 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.pth) | +| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 95.37 | 73.19 | 28.11 | 4.19 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-256x192-2abb7558_20230605.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.15 | 73.56 | 14.06 | 4.37 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-384x288-89e6428b_20230605.pth) | +| 
[RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.56 | 74.38 | 28.24 | 9.40 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-384x288-734182ce_20230605.pth) | +| [RTMPose-x\*](./rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py) | 384x288 | 95.74 | 74.82 | 50.00 | 17.29 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7-halpe26_700e-384x288-7fb6e239_20230606.pth) | + #### Model Pruning **Notes** @@ -240,7 +259,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr | Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | | [RTMPose-t\*](./rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py) | 256x256 | 1.67 | 0.652 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth) | -| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | +| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | | [RTMPose-m\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.44 | 2.852 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth) | @@ -304,6 +323,7 @@ We provide the UDP pretraining configs of the CSPNeXt backbone. 
Find more detail | CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | | CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | | CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | +| CSPNeXt-x\* | 384x288 | 54.92 | 19.96 | 78.1 | 98.00 | 71.79 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth) | @@ -317,6 +337,7 @@ We also provide the ImageNet classification pre-trained weights of the CSPNeXt b | CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | | CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | | CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | +| CSPNeXt-x | 224x224 | 48.85 | 7.76 | 82.10 | 95.69 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-x_8xb256-rsb-a1-600e_in1k-b3f78edd.pth) | ## 👀 Visualization [🔝](#-table-of-contents) diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 01f5240fed..00c4b346d6 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -46,6 +46,8 @@ ______________________________________________________________________ ## 🥳 最新进展 [🔝](#-table-of-contents) +- 2023 年 6 月: + - 
发布混合数据集训练的 26 点 Body 模型。 - 2023 年 5 月: - 添加 [代码示例](./examples/) - 发布混合数据集训练的 Hand, Face, Body 模型。 @@ -149,7 +151,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 - RTMPose 在更多硬件平台上的推理速度可以前往 [Benchmark](./benchmark/README_CN.md) 查看。 - 如果你有希望我们支持的数据集,欢迎[联系我们](https://uua478.fanqier.cn/f/xxmynrki)/[Google Questionnaire](https://docs.google.com/forms/d/e/1FAIpQLSfzwWr3eNlDzhU98qzk2Eph44Zio6hi5r0iSwfO9wSARkHdWg/viewform?usp=sf_link)! -### 人体 2d 关键点 (17 Keypoints) +### 人体 2d 关键点 + +#### 17 Keypoints
AIC+COCO @@ -189,6 +193,21 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性
+#### 26 Keypoints + +- 26 关键点的定义遵循 [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/),详情见 [meta info](/configs/_base_/datasets/halpe26.py)。 +- 模型在 `Body8` 上进行训练和评估。 + +| Config | Input Size | PCK@0.1
(Body8) | AUC
(Body8) | Params(M) | FLOPS(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | ncnn-FP16-Latency
(ms)
(Snapdragon 865) | Download | +| :---------------------------------------------------------------------------------------: | :--------: | :---------------------: | :-----------------: | :-------: | :------: | :-----------------------------------------: | :------------------------------------------------: | :-----------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------: | +| [RTMPose-t\*](./rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 91.89 | 66.35 | 3.51 | 0.37 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-body7_pt-body7-halpe26_700e-256x192-6020f8a6_20230605.pth) | +| [RTMPose-s\*](./rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py) | 256x192 | 93.01 | 68.62 | 5.70 | 0.70 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-body7_pt-body7-halpe26_700e-256x192-7f134165_20230605.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 94.75 | 71.91 | 13.93 | 1.95 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-256x192-4d3e73dd_20230605.pth) | +| [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py) | 256x192 | 95.37 | 73.19 | 28.11 | 4.19 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-256x192-2abb7558_20230605.pth) | +| [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.15 | 73.56 | 14.06 | 4.37 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7-halpe26_700e-384x288-89e6428b_20230605.pth) | +| 
[RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py) | 384x288 | 95.56 | 74.38 | 28.24 | 9.40 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7-halpe26_700e-384x288-734182ce_20230605.pth) | +| [RTMPose-x\*](./rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py) | 384x288 | 95.74 | 74.82 | 50.00 | 17.29 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7-halpe26_700e-384x288-7fb6e239_20230606.pth) | + #### 模型剪枝 **说明** @@ -231,7 +250,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | Config | Input Size | NME
(LaPa) | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | | [RTMPose-t\*](./rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py) | 256x256 | 1.67 | 0.652 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_simcc-face6_pt-in1k_120e-256x256-df79d9a5_20230529.pth) | -| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | +| [RTMPose-s\*](./rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py) | 256x256 | 1.59 | 1.119 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-face6_pt-in1k_120e-256x256-d779fdef_20230529.pth) | | [RTMPose-m\*](./rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py) | 256x256 | 1.44 | 2.852 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-face6_pt-in1k_120e-256x256-72a37400_20230529.pth) | @@ -295,6 +314,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | CSPNeXt-l\* | 256x192 | 32.44 | 5.32 | 75.7 | 97.76 | 69.57 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-256x192-5e9558ef_20230504.pth) | | CSPNeXt-m\* | 384x288 | 17.53 | 6.86 | 75.8 | 97.60 | 70.18 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-body7_210e-384x288-b9bc2b57_20230504.pth) | | CSPNeXt-l\* | 384x288 | 32.44 | 11.96 | 77.2 | 97.89 | 71.23 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-l_udp-body7_210e-384x288-b15bc30d_20230504.pth) | +| CSPNeXt-x\* | 384x288 | 54.92 | 19.96 | 78.1 | 98.00 | 71.79 | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth) | @@ -308,6 +328,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | CSPNeXt-s | 224x224 | 4.89 | 0.66 | 74.41 | 92.23 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-s_imagenet_600e-ea671761.pth) | | CSPNeXt-m | 224x224 | 13.05 | 1.93 | 79.27 | 94.79 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth) | | CSPNeXt-l | 224x224 | 27.16 | 4.19 | 81.30 | 95.62 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-l_8xb256-rsb-a1-600e_in1k-6a760974.pth) | +| CSPNeXt-x | 224x224 | 48.85 | 7.76 | 82.10 | 95.69 | [Model](https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-x_8xb256-rsb-a1-600e_in1k-b3f78edd.pth) | ## 👀 可视化 [🔝](#-table-of-contents) diff --git a/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py b/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py index 337ce8cfd9..d25fd13e70 100644 --- a/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py +++ b/projects/rtmpose/rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (256, 256) + # runtime max_epochs = 210 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 64 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -39,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - 
input_size=(256, 256), + input_size=input_size, sigma=(5.66, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -73,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -101,12 +108,6 @@ data_root = 'data/ap10k/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/pose/ap10k/', -# f'{data_root}': 's3://openmmlab/datasets/pose/ap10k/' -# })) # pipelines train_pipeline = [ @@ -176,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=64, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -189,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, @@ -204,7 +205,7 @@ pipeline=val_pipeline, )) test_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py index ab1479dc76..c472cac1fb 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (192, 256) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', 
optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=1024, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -177,7 +183,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +196,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py index 5f8cadc5b0..47697078d5 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (288, 384) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + 
clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(288, 384), + input_size=input_size, sigma=(6., 6.93), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=1024, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -177,7 +183,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +196,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..fe19d45af9 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-256x192.py @@ -0,0 +1,535 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, 
bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1., + widen_factor=1., + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1024, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + 
+# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = 
[(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + 
data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=5, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + 
data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + 
ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=5, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..bec4fcb924 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-l_8xb512-700e_body8-halpe26-384x288.py @@ -0,0 +1,535 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + 
optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1., + widen_factor=1., + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1024, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# 
base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 
6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + 
type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + 
num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + 
+val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py index 89b0e682f8..97e70667e6 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (192, 256) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, 
val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -102,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -177,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py index 64169d0b3b..5216cf1b44 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py @@ -1,9 +1,15 @@ _base_ = 
['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (288, 384) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(288, 384), + input_size=input_size, sigma=(6., 6.93), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -102,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -177,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py 
b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..6391044c87 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-256x192.py @@ -0,0 +1,529 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + 
checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-256x192-e48f03d0_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=768, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + 
type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + 
ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + 
persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = 
dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git 
a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..2944058bd1 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb512-700e_body8-halpe26-384x288.py @@ -0,0 +1,542 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 512 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + 
type='Pretrained',
+            prefix='backbone.',
+            checkpoint='https://download.openmmlab.com/mmpose/v1/projects/'
+            'rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth'  # noqa
+        )),
+    head=dict(
+        type='RTMCCHead',
+        in_channels=768,
+        out_channels=num_keypoints,
+        input_size=input_size,
+        in_featuremap_size=tuple([s // 32 for s in input_size]),
+        simcc_split_ratio=codec['simcc_split_ratio'],
+        final_layer_kernel_size=7,
+        gau_cfg=dict(
+            hidden_dims=256,
+            s=128,
+            expansion_factor=2,
+            dropout_rate=0.,
+            drop_path=0.,
+            act_fn='SiLU',
+            use_rel_bias=False,
+            pos_enc=False),
+        loss=dict(
+            type='KLDiscretLoss',
+            use_target_weight=True,
+            beta=10.,
+            label_softmax=True),
+        decoder=codec),
+    test_cfg=dict(flip_test=True))
+
+# base dataset settings
+dataset_type = 'CocoWholeBodyDataset'
+data_mode = 'topdown'
+data_root = 'data/'
+
+backend_args = dict(backend='local')
+# backend_args = dict(
+#     backend='petrel',
+#     path_mapping=dict({
+#         f'{data_root}': 's3://openmmlab/datasets/',
+#         f'{data_root}': 's3://openmmlab/datasets/'
+#     }))
+
+# pipelines
+train_pipeline = [
+    dict(type='LoadImage', backend_args=backend_args),
+    dict(type='GetBBoxCenterScale'),
+    dict(type='RandomFlip', direction='horizontal'),
+    dict(type='RandomHalfBody'),
+    dict(
+        type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90),
+    dict(type='TopdownAffine', input_size=codec['input_size']),
+    dict(type='PhotometricDistortion'),
+    dict(
+        type='Albumentation',
+        transforms=[
+            dict(type='Blur', p=0.1),
+            dict(type='MedianBlur', p=0.1),
+            dict(
+                type='CoarseDropout',
+                max_holes=1,
+                max_height=0.4,
+                max_width=0.4,
+                min_holes=1,
+                min_height=0.2,
+                min_width=0.2,
+                p=1.0),
+        ]),
+    dict(
+        type='GenerateTarget',
+        encoder=codec,
+        use_dataset_keypoint_weights=True),
+    dict(type='PackPoseInputs')
+]
+val_pipeline = [
+    dict(type='LoadImage', backend_args=backend_args),
+    dict(type='GetBBoxCenterScale'),
+    dict(type='TopdownAffine', input_size=codec['input_size']),
+    
dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + 
ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + 
ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + 
num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# hooks +# default_hooks = dict( +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + 
type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..3f7d985079 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb1024-700e_body8-halpe26-256x192.py @@ -0,0 +1,535 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 1024 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.0), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + 
type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/rtmpose-s_simcc-body7_pt-body7_420e-256x192-acd4a1ef_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=512, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', + encoder=codec, + 
use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.6, 1.4], + rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 
9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + type='KeypointConverter', + 
num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + data_root=data_root, + 
data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = val_dataloader + +# 
hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py index 2a18f9b9b1..dd854f10f0 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-s_8xb256-420e_coco-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (192, 256) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=512, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), 
simcc_split_ratio=codec['simcc_split_ratio'], @@ -177,7 +183,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +196,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py new file mode 100644 index 0000000000..69100b6cdc --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb1024-700e_body8-halpe26-256x192.py @@ -0,0 +1,536 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (192, 256) + +# runtime +max_epochs = 700 +stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 1024 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + 
type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.167, + widen_factor=0.375, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-tiny_udp-body7_210e-256x192-a3775292_20230504.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=384, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + type='GenerateTarget', 
+ encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.6, 1.4], + rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + (5, 5), + (6, 6), + (7, 
7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + dict( + 
type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + 
data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + 
+test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + # dict( + # type='EMAHook', + # ema_type='ExpMomentumEMA', + # momentum=0.0002, + # update_buffers=True, + # priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py index cf37c86131..1f344c72d1 100644 --- a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-t_8xb256-420e_coco-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 17 +input_size = (192, 256) + # runtime max_epochs = 420 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -24,7 +31,6 @@ begin=0, end=1000), dict( - # use cosine lr from 210 to 420 epoch type='CosineAnnealingLR', eta_min=base_lr * 0.05, begin=max_epochs // 2, @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=384, - out_channels=17, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 
32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -177,7 +183,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +196,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=64, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py new file mode 100644 index 0000000000..e0ad3aeb9d --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_body8-halpe26-384x288.py @@ -0,0 +1,535 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 26 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 20 +base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model 
settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1.33, + widen_factor=1.25, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1280, + out_channels=num_keypoints, + input_size=input_size, + in_featuremap_size=tuple([s // 32 for s in input_size]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict( + 
type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict( + type='GenerateTarget', + encoder=codec, + use_dataset_keypoint_weights=True), + dict(type='PackPoseInputs') +] + +# mapping +coco_halpe26 = [(i, i) for i in range(17)] + [(17, 20), (18, 22), (19, 24), + (20, 21), (21, 23), (22, 25)] + +aic_halpe26 = [(0, 6), (1, 8), (2, 10), (3, 5), (4, 7), + (5, 9), (6, 12), (7, 14), (8, 16), (9, 11), (10, 13), (11, 15), + (12, 17), (13, 18)] + +crowdpose_halpe26 = [(0, 5), (1, 6), (2, 7), (3, 8), (4, 9), (5, 10), (6, 11), + (7, 12), (8, 13), (9, 14), (10, 15), (11, 16), (12, 17), + (13, 18)] + +mpii_halpe26 = [ + (0, 16), + (1, 14), + (2, 12), + (3, 11), + (4, 13), + (5, 15), + (8, 18), + (9, 17), + (10, 10), + (11, 8), + (12, 6), + (13, 5), + (14, 7), + (15, 9), +] + +jhmdb_halpe26 = [ + (0, 18), + (2, 17), + (3, 6), + (4, 5), + (5, 12), + (6, 11), + (7, 8), + (8, 7), + (9, 14), + (10, 13), + (11, 10), + (12, 9), + (13, 16), + (14, 15), +] + +halpe_halpe26 = [(i, i) for i in range(26)] + +ochuman_halpe26 = [(i, i) for i in range(17)] + +posetrack_halpe26 = [ + (0, 0), + (2, 17), + (3, 3), + (4, 4), + 
(5, 5), + (6, 6), + (7, 7), + (8, 8), + (9, 9), + (10, 10), + (11, 11), + (12, 12), + (13, 13), + (14, 14), + (15, 15), + (16, 16), +] + +# train datasets +dataset_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +dataset_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +dataset_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_trainval.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +dataset_mpii = dict( + type='MpiiDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_train.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +dataset_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_train.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +dataset_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_train_v1.json', + data_prefix=dict(img='pose/Halpe/hico_20160224_det/images/train2015'), + pipeline=[ + 
dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +dataset_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_train.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + pin_memory=True, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + dataset_coco, + dataset_aic, + dataset_crowdpose, + dataset_mpii, + dataset_jhmdb, + dataset_halpe, + dataset_posetrack, + ], + pipeline=train_pipeline, + test_mode=False, + )) + +# val datasets +val_coco = dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=coco_halpe26) + ], +) + +val_aic = dict( + type='AicDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_val.json', + data_prefix=dict( + img='pose/ai_challenge/ai_challenger_keypoint' + '_validation_20170911/keypoint_validation_images_20170911/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=aic_halpe26) + ], +) + +val_crowdpose = dict( + type='CrowdPoseDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='crowdpose/annotations/mmpose_crowdpose_test.json', + data_prefix=dict(img='pose/CrowdPose/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=crowdpose_halpe26) + ], +) + +val_mpii = dict( + type='MpiiDataset', + 
data_root=data_root, + data_mode=data_mode, + ann_file='mpii/annotations/mpii_val.json', + data_prefix=dict(img='pose/MPI/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=mpii_halpe26) + ], +) + +val_jhmdb = dict( + type='JhmdbDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='jhmdb/annotations/Sub1_test.json', + data_prefix=dict(img='pose/JHMDB/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=jhmdb_halpe26) + ], +) + +val_halpe = dict( + type='HalpeDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='halpe/annotations/halpe_val_v1.json', + data_prefix=dict(img='detection/coco/val2017/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=halpe_halpe26) + ], +) + +val_ochuman = dict( + type='OCHumanDataset', + data_root=data_root, + data_mode=data_mode, + ann_file='ochuman/annotations/' + 'ochuman_coco_format_val_range_0.00_1.00.json', + data_prefix=dict(img='pose/OCHuman/images/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=ochuman_halpe26) + ], +) + +val_posetrack = dict( + type='PoseTrack18Dataset', + data_root=data_root, + data_mode=data_mode, + ann_file='posetrack18/annotations/posetrack18_val.json', + data_prefix=dict(img='pose/PoseChallenge2018/'), + pipeline=[ + dict( + type='KeypointConverter', + num_keypoints=num_keypoints, + mapping=posetrack_halpe26) + ], +) + +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type='CombinedDataset', + metainfo=dict(from_file='configs/_base_/datasets/halpe26.py'), + datasets=[ + val_coco, + val_aic, + val_crowdpose, + val_mpii, + val_jhmdb, + val_halpe, + val_ochuman, + val_posetrack, + ], + pipeline=val_pipeline, + test_mode=True, + )) + +test_dataloader = 
val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='AUC', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +test_evaluator = [dict(type='PCKAccuracy', thr=0.1), dict(type='AUC')] +val_evaluator = test_evaluator diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py index baeca2c138..5490074a4d 100644 --- a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-m_8xb256-120e_lapa-256x256.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 106 +input_size = (256, 256) + # runtime max_epochs = 120 stage2_num_epochs = 10 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=1) randomness = dict(seed=21) @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(256, 256), + input_size=input_size, sigma=(5.66, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=106, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -171,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -184,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, 
persistent_workers=True, drop_last=False, @@ -199,7 +205,7 @@ pipeline=val_pipeline, )) test_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py index 777a67c28e..2763ecd927 100644 --- a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-s_8xb256-120e_lapa-256x256.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 106 +input_size = (256, 256) + # runtime max_epochs = 120 stage2_num_epochs = 10 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=1) randomness = dict(seed=21) @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(256, 256), + input_size=input_size, sigma=(5.66, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=512, - out_channels=106, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -171,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -184,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, persistent_workers=True, drop_last=False, @@ -199,7 +205,7 @@ pipeline=val_pipeline, )) test_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py 
b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py index 2602b642cc..ad6e4b212f 100644 --- a/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py +++ b/projects/rtmpose/rtmpose/face_2d_keypoint/rtmpose-t_8xb256-120e_lapa-256x256.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 106 +input_size = (256, 256) + # runtime max_epochs = 120 stage2_num_epochs = 10 base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=1) randomness = dict(seed=21) @@ -40,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(256, 256), + input_size=input_size, sigma=(5.66, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -74,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=384, - out_channels=106, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -171,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=256, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -184,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, persistent_workers=True, drop_last=False, @@ -199,7 +205,7 @@ pipeline=val_pipeline, )) test_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=4, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py b/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py index eb477a9426..fc96cf7e67 100644 --- a/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py +++ 
b/projects/rtmpose/rtmpose/hand_2d_keypoint/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 21 +input_size = (256, 256) + # runtime max_epochs = 210 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 32 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -39,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(256, 256), + input_size=input_size, sigma=(5.66, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -73,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=21, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -101,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -177,7 +178,7 @@ # data loaders train_dataloader = dict( - batch_size=32, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -190,7 +191,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py index 
df44b5a64f..19dc8f8d99 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 133 +input_size = (288, 384) + # runtime max_epochs = 270 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 32 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -39,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(288, 384), + input_size=input_size, sigma=(6., 6.93), simcc_split_ratio=2.0, normalize=False, @@ -73,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=1024, - out_channels=133, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -101,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -176,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=32, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -189,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git 
a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py index aa02d77209..7903e09923 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 133 +input_size = (192, 256) + # runtime max_epochs = 270 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 64 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -39,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -73,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=1024, - out_channels=133, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -101,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -176,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=64, + batch_size=train_batch_size, num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -189,7 +190,7 @@ pipeline=train_pipeline, )) 
val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py index 309e0431b4..09c7a86840 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py @@ -1,9 +1,15 @@ _base_ = ['mmpose::_base_/default_runtime.py'] +# common setting +num_keypoints = 133 +input_size = (192, 256) + # runtime max_epochs = 270 stage2_num_epochs = 30 base_lr = 4e-3 +train_batch_size = 64 +val_batch_size = 32 train_cfg = dict(max_epochs=max_epochs, val_interval=10) randomness = dict(seed=21) @@ -12,6 +18,7 @@ optim_wrapper = dict( type='OptimWrapper', optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), paramwise_cfg=dict( norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) @@ -39,7 +46,7 @@ # codec settings codec = dict( type='SimCCLabel', - input_size=(192, 256), + input_size=input_size, sigma=(4.9, 5.66), simcc_split_ratio=2.0, normalize=False, @@ -73,7 +80,7 @@ head=dict( type='RTMCCHead', in_channels=768, - out_channels=133, + out_channels=num_keypoints, input_size=codec['input_size'], in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), simcc_split_ratio=codec['simcc_split_ratio'], @@ -101,12 +108,6 @@ data_root = 'data/coco/' backend_args = dict(backend='local') -# backend_args = dict( -# backend='petrel', -# path_mapping=dict({ -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', -# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' -# })) # pipelines train_pipeline = [ @@ -176,7 +177,7 @@ # data loaders train_dataloader = dict( - batch_size=64, + batch_size=train_batch_size, 
num_workers=10, persistent_workers=True, sampler=dict(type='DefaultSampler', shuffle=True), @@ -189,7 +190,7 @@ pipeline=train_pipeline, )) val_dataloader = dict( - batch_size=32, + batch_size=val_batch_size, num_workers=10, persistent_workers=True, drop_last=False, From 59eff4930e37f310cfc76a225bc7b9d109885e53 Mon Sep 17 00:00:00 2001 From: Tau Date: Wed, 7 Jun 2023 10:27:44 +0800 Subject: [PATCH 16/52] [Docs] update img (#2435) --- projects/rtmpose/README.md | 2 +- projects/rtmpose/README_CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index d5d64cd999..11f02892bb 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -67,7 +67,7 @@ ______________________________________________________________________
- +
### ✨ Major Features diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 00c4b346d6..0b64021872 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -63,7 +63,7 @@ ______________________________________________________________________
- +
### ✨ 主要特性 From fd39228ca8dfe958be431c3f1667e09db5603099 Mon Sep 17 00:00:00 2001 From: Yifan Lareina WU Date: Fri, 9 Jun 2023 14:33:26 +0800 Subject: [PATCH 17/52] [Enhance] Enhance functionality of 3d human pose (#2439) --- .../topdown_heatmap/coco/vitpose_coco.md | 3 +- demo/body3d_pose_lifter_demo.py | 408 ++++++++++-------- demo/docs/{ => en}/3d_human_pose_demo.md | 4 +- demo/docs/zh_cn/3d_human_pose_demo.md | 1 + docs/en/index.rst | 1 + docs/src/papers/algorithms/vitpose.md | 2 +- docs/zh_cn/index.rst | 1 + 7 files changed, 244 insertions(+), 176 deletions(-) rename demo/docs/{ => en}/3d_human_pose_demo.md (97%) create mode 100644 demo/docs/zh_cn/3d_human_pose_demo.md diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md index 67e2a9cb3b..409a5bf28b 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md @@ -7,8 +7,7 @@ mim install 'mmcls>=1.0.0rc6'
- -ViTPose (NeurIPS'2022) +ViTPose (NeurIPS'2022) ```bibtex @inproceedings{ diff --git a/demo/body3d_pose_lifter_demo.py b/demo/body3d_pose_lifter_demo.py index 02e3014f21..39179d3448 100644 --- a/demo/body3d_pose_lifter_demo.py +++ b/demo/body3d_pose_lifter_demo.py @@ -1,13 +1,15 @@ # Copyright (c) OpenMMLab. All rights reserved. +import mimetypes import os -import os.path as osp +import time from argparse import ArgumentParser from functools import partial import cv2 +import json_tricks as json import mmcv +import mmengine import numpy as np -from mmengine.registry import init_default_scope from mmengine.structures import InstanceData from mmpose.apis import (_track_by_iou, _track_by_oks, collect_multi_frames, @@ -16,7 +18,8 @@ from mmpose.models.pose_estimators import PoseLifter from mmpose.models.pose_estimators.topdown import TopdownPoseEstimator from mmpose.registry import VISUALIZERS -from mmpose.structures import PoseDataSample, merge_data_samples +from mmpose.structures import (PoseDataSample, merge_data_samples, + split_instances) from mmpose.utils import adapt_mmdet_pipeline try: @@ -156,6 +159,11 @@ def parse_args(): default='', help='Root of the output video file. 
' 'Default not saving the visualization video.') + parser.add_argument( + '--save-predictions', + action='store_true', + default=False, + help='whether to save predicted results') parser.add_argument( '--device', default='cuda:0', help='Device used for inference') parser.add_argument( @@ -173,6 +181,8 @@ def parse_args(): '--use-oks-tracking', action='store_true', help='Using OKS tracking') parser.add_argument( '--tracking-thr', type=float, default=0.3, help='Tracking threshold') + parser.add_argument( + '--show-interval', type=int, default=0, help='Sleep seconds per frame') parser.add_argument( '--thickness', type=int, @@ -270,196 +280,250 @@ def main(): # the dataset_meta is loaded from the checkpoint local_visualizer.set_dataset_meta(pose_lifter.dataset_meta) - init_default_scope(pose_lifter.cfg.get('default_scope', 'mmpose')) + if args.input == 'webcam': + input_type = 'webcam' + else: + input_type = mimetypes.guess_type(args.input)[0].split('/')[0] if args.output_root == '': save_out_video = False else: - os.makedirs(args.output_root, exist_ok=True) + mmengine.mkdir_or_exist(args.output_root) + output_file = os.path.join(args.output_root, + os.path.basename(args.input)) + if args.input == 'webcam': + output_file += '.mp4' save_out_video = True + if args.save_predictions: + assert args.output_root != '' + args.pred_save_path = f'{args.output_root}/results_' \ + f'{os.path.splitext(os.path.basename(args.input))[0]}.json' + if save_out_video: fourcc = cv2.VideoWriter_fourcc(*'mp4v') - video_writer = None pose_est_results_list = [] next_id = 0 pose_est_results = [] - video = cv2.VideoCapture(args.input) - assert video.isOpened(), f'Failed to load video file {args.input}' - - (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.') - if int(major_ver) < 3: - fps = video.get(cv2.cv.CV_CAP_PROP_FPS) - width = video.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) - height = video.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) - else: - fps = video.get(cv2.CAP_PROP_FPS) - width = 
video.get(cv2.CAP_PROP_FRAME_WIDTH) - height = video.get(cv2.CAP_PROP_FRAME_HEIGHT) - - frame_idx = -1 - - while video.isOpened(): - success, frame = video.read() - frame_idx += 1 - - if not success: - break + if input_type in ['webcam', 'video']: + if args.input == 'webcam': + video = cv2.VideoCapture(0) + else: + video = cv2.VideoCapture(args.input) - pose_est_results_last = pose_est_results + (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.') + if int(major_ver) < 3: + fps = video.get(cv2.cv.CV_CAP_PROP_FPS) + width = video.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) + height = video.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) + else: + fps = video.get(cv2.CAP_PROP_FPS) + width = video.get(cv2.CAP_PROP_FRAME_WIDTH) + height = video.get(cv2.CAP_PROP_FRAME_HEIGHT) - # First stage: 2D pose detection - # test a single image, the resulting box is (x1, y1, x2, y2) - det_result = inference_detector(detector, frame) - pred_instance = det_result.pred_instances.cpu().numpy() + video_writer = None + pred_instances_list = [] + frame_idx = 0 - bboxes = pred_instance.bboxes - bboxes = bboxes[np.logical_and(pred_instance.labels == args.det_cat_id, - pred_instance.scores > args.bbox_thr)] + while video.isOpened(): + success, frame = video.read() + frame_idx += 1 - if args.use_multi_frames: - frames = collect_multi_frames(video, frame_idx, indices, - args.online) + if not success: + break - # make person results for current image - pose_est_results = inference_topdown( - pose_estimator, frames if args.use_multi_frames else frame, bboxes) + pose_est_results_last = pose_est_results - pose_est_results = get_area(pose_est_results) - if args.use_oks_tracking: - _track = partial(_track_by_oks) - else: - _track = _track_by_iou + # First stage: 2D pose detection + # test a single image, the resulting box is (x1, y1, x2, y2) + det_result = inference_detector(detector, frame) + pred_instance = det_result.pred_instances.cpu().numpy() - for i, result in enumerate(pose_est_results): - 
track_id, pose_est_results_last, match_result = _track( - result, pose_est_results_last, args.tracking_thr) - if track_id == -1: - pred_instances = result.pred_instances.cpu().numpy() - keypoints = pred_instances.keypoints - if np.count_nonzero(keypoints[:, :, 1]) >= 3: - pose_est_results[i].set_field(next_id, 'track_id') - next_id += 1 - else: - # If the number of keypoints detected is small, - # delete that person instance. - keypoints[:, :, 1] = -10 - pose_est_results[i].pred_instances.set_field( - keypoints, 'keypoints') - bboxes = pred_instances.bboxes * 0 - pose_est_results[i].pred_instances.set_field( - bboxes, 'bboxes') - pose_est_results[i].set_field(-1, 'track_id') - pose_est_results[i].set_field(pred_instances, - 'pred_instances') + bboxes = pred_instance.bboxes + bboxes = bboxes[np.logical_and( + pred_instance.labels == args.det_cat_id, + pred_instance.scores > args.bbox_thr)] + + if args.use_multi_frames: + frames = collect_multi_frames(video, frame_idx, indices, + args.online) + + # make person results for current image + pose_est_results = inference_topdown( + pose_estimator, frames if args.use_multi_frames else frame, + bboxes) + + pose_est_results = get_area(pose_est_results) + if args.use_oks_tracking: + _track = partial(_track_by_oks) else: - pose_est_results[i].set_field(track_id, 'track_id') - - del match_result - - pose_est_results_converted = [] - for pose_est_result in pose_est_results: - pose_est_result_converted = PoseDataSample() - gt_instances = InstanceData() - pred_instances = InstanceData() - for k in pose_est_result.gt_instances.keys(): - gt_instances.set_field(pose_est_result.gt_instances[k], k) - for k in pose_est_result.pred_instances.keys(): - pred_instances.set_field(pose_est_result.pred_instances[k], k) - pose_est_result_converted.gt_instances = gt_instances - pose_est_result_converted.pred_instances = pred_instances - pose_est_result_converted.track_id = pose_est_result.track_id - 
pose_est_results_converted.append(pose_est_result_converted) - - for i, result in enumerate(pose_est_results_converted): - keypoints = result.pred_instances.keypoints - keypoints = convert_keypoint_definition(keypoints, - pose_det_dataset['type'], - pose_lift_dataset['type']) - pose_est_results_converted[i].pred_instances.keypoints = keypoints - - pose_est_results_list.append(pose_est_results_converted.copy()) - - # extract and pad input pose2d sequence - pose_results_2d = extract_pose_sequence( - pose_est_results_list, - frame_idx=frame_idx, - causal=pose_lift_dataset.get('causal', False), - seq_len=pose_lift_dataset.get('seq_len', 1), - step=pose_lift_dataset.get('seq_step', 1)) - - # Second stage: Pose lifting - # 2D-to-3D pose lifting - pose_lift_results = inference_pose_lifter_model( - pose_lifter, - pose_results_2d, - image_size=(width, height), - norm_pose_2d=args.norm_pose_2d) - - # Pose processing - for idx, pose_lift_res in enumerate(pose_lift_results): - gt_instances = pose_lift_res.gt_instances - - pose_lift_res.track_id = pose_est_results_converted[i].get( - 'track_id', 1e4) - - pred_instances = pose_lift_res.pred_instances - keypoints = pred_instances.keypoints - - keypoints = keypoints[..., [0, 2, 1]] - keypoints[..., 0] = -keypoints[..., 0] - keypoints[..., 2] = -keypoints[..., 2] - - # rebase height (z-axis) - if args.rebase_keypoint_height: - keypoints[..., 2] -= np.min( - keypoints[..., 2], axis=-1, keepdims=True) - - pose_lift_results[i].pred_instances.keypoints = keypoints - - pose_lift_results = sorted( - pose_lift_results, key=lambda x: x.get('track_id', 1e4)) - - pred_3d_data_samples = merge_data_samples(pose_lift_results) - - # Visualization - frame = mmcv.bgr2rgb(frame) - - det_data_sample = merge_data_samples(pose_est_results) - - local_visualizer.add_datasample( - 'result', - frame, - data_sample=pred_3d_data_samples, - det_data_sample=det_data_sample, - draw_gt=False, - det_kpt_color=det_kpt_color, - 
det_dataset_skeleton=det_dataset_skeleton, - det_dataset_link_color=det_dataset_link_color, - show=args.show, - draw_bbox=True, - kpt_thr=args.kpt_thr, - wait_time=0.001) - - frame_vis = local_visualizer.get_image() - - if save_out_video: - if video_writer is None: - # the size of the image with visualization may vary - # depending on the presence of heatmaps - video_writer = cv2.VideoWriter( - osp.join(args.output_root, - f'vis_{osp.basename(args.input)}'), fourcc, fps, - (frame_vis.shape[1], frame_vis.shape[0])) - - video_writer.write(mmcv.rgb2bgr(frame_vis)) - - video.release() - - if video_writer: - video_writer.release() + _track = _track_by_iou + + for i, result in enumerate(pose_est_results): + track_id, pose_est_results_last, match_result = _track( + result, pose_est_results_last, args.tracking_thr) + if track_id == -1: + pred_instances = result.pred_instances.cpu().numpy() + keypoints = pred_instances.keypoints + if np.count_nonzero(keypoints[:, :, 1]) >= 3: + pose_est_results[i].set_field(next_id, 'track_id') + next_id += 1 + else: + # If the number of keypoints detected is small, + # delete that person instance. 
+ keypoints[:, :, 1] = -10 + pose_est_results[i].pred_instances.set_field( + keypoints, 'keypoints') + bboxes = pred_instances.bboxes * 0 + pose_est_results[i].pred_instances.set_field( + bboxes, 'bboxes') + pose_est_results[i].set_field(-1, 'track_id') + pose_est_results[i].set_field(pred_instances, + 'pred_instances') + else: + pose_est_results[i].set_field(track_id, 'track_id') + + del match_result + + pose_est_results_converted = [] + for pose_est_result in pose_est_results: + pose_est_result_converted = PoseDataSample() + gt_instances = InstanceData() + pred_instances = InstanceData() + for k in pose_est_result.gt_instances.keys(): + gt_instances.set_field(pose_est_result.gt_instances[k], k) + for k in pose_est_result.pred_instances.keys(): + pred_instances.set_field(pose_est_result.pred_instances[k], + k) + pose_est_result_converted.gt_instances = gt_instances + pose_est_result_converted.pred_instances = pred_instances + pose_est_result_converted.track_id = pose_est_result.track_id + + keypoints = convert_keypoint_definition( + pred_instances.keypoints, pose_det_dataset['type'], + pose_lift_dataset['type']) + pose_est_result_converted.pred_instances.keypoints = keypoints + pose_est_results_converted.append(pose_est_result_converted) + + pose_est_results_list.append(pose_est_results_converted.copy()) + + # extract and pad input pose2d sequence + pose_results_2d = extract_pose_sequence( + pose_est_results_list, + frame_idx=frame_idx, + causal=pose_lift_dataset.get('causal', False), + seq_len=pose_lift_dataset.get('seq_len', 1), + step=pose_lift_dataset.get('seq_step', 1)) + + # Second stage: Pose lifting + # 2D-to-3D pose lifting + pose_lift_results = inference_pose_lifter_model( + pose_lifter, + pose_results_2d, + image_size=(width, height), + norm_pose_2d=args.norm_pose_2d) + + # Pose processing + for idx, pose_lift_res in enumerate(pose_lift_results): + gt_instances = pose_lift_res.gt_instances + + pose_lift_res.track_id = 
pose_est_results_converted[idx].get( + 'track_id', 1e4) + + pred_instances = pose_lift_res.pred_instances + keypoints = pred_instances.keypoints + keypoint_scores = pred_instances.keypoint_scores + if keypoint_scores.ndim == 3: + keypoint_scores = np.squeeze(keypoint_scores, axis=1) + pose_lift_results[ + idx].pred_instances.keypoint_scores = keypoint_scores + if keypoints.ndim == 4: + keypoints = np.squeeze(keypoints, axis=1) + + keypoints = keypoints[..., [0, 2, 1]] + keypoints[..., 0] = -keypoints[..., 0] + keypoints[..., 2] = -keypoints[..., 2] + + # rebase height (z-axis) + if args.rebase_keypoint_height: + keypoints[..., 2] -= np.min( + keypoints[..., 2], axis=-1, keepdims=True) + + pose_lift_results[idx].pred_instances.keypoints = keypoints + + pose_lift_results = sorted( + pose_lift_results, key=lambda x: x.get('track_id', 1e4)) + + pred_3d_data_samples = merge_data_samples(pose_lift_results) + + # Visualization + frame = mmcv.bgr2rgb(frame) + + det_data_sample = merge_data_samples(pose_est_results) + + if local_visualizer is not None: + local_visualizer.add_datasample( + 'result', + frame, + data_sample=pred_3d_data_samples, + det_data_sample=det_data_sample, + draw_gt=False, + det_kpt_color=det_kpt_color, + det_dataset_skeleton=det_dataset_skeleton, + det_dataset_link_color=det_dataset_link_color, + show=args.show, + draw_bbox=True, + kpt_thr=args.kpt_thr, + wait_time=args.show_interval) + + frame_vis = local_visualizer.get_image() + + if args.save_predictions: + # save prediction results + pred_instances_list.append( + dict( + frame_id=frame_idx, + instances=split_instances( + pred_3d_data_samples.get('pred_instances', None)))) + + if save_out_video: + if video_writer is None: + # the size of the image with visualization may vary + # depending on the presence of heatmaps + video_writer = cv2.VideoWriter(output_file, fourcc, fps, + (frame_vis.shape[1], + frame_vis.shape[0])) + + video_writer.write(mmcv.rgb2bgr(frame_vis)) + + # press ESC to exit + if 
cv2.waitKey(5) & 0xFF == 27: + break + time.sleep(args.show_interval) + + if frame_idx == 50: + break + + video.release() + + if video_writer: + video_writer.release() + else: + args.save_predictions = False + raise ValueError( + f'file {os.path.basename(args.input)} has invalid format.') + + if args.save_predictions: + with open(args.pred_save_path, 'w') as f: + json.dump( + dict( + meta_info=pose_lifter.dataset_meta, + instance_info=pred_instances_list), + f, + indent='\t') + print(f'predictions have been saved at {args.pred_save_path}') if __name__ == '__main__': diff --git a/demo/docs/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md similarity index 97% rename from demo/docs/3d_human_pose_demo.md rename to demo/docs/en/3d_human_pose_demo.md index eb2eab92ae..8a6f1d206e 100644 --- a/demo/docs/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -21,12 +21,14 @@ ${MMPOSE_CHECKPOINT_FILE_3D} \ [--rebase-keypoint-height] \ [--norm-pose-2d] \ [--output-root ${OUT_VIDEO_ROOT}] \ +[--save-predictions] \ [--device ${GPU_ID or CPU}] \ [--det-cat-id DET_CAT_ID] \ [--bbox-thr BBOX_THR] \ [--kpt-thr KPT_THR] \ [--use-oks-tracking] \ [--tracking-thr TRACKING_THR] \ +[--show-interval INTERVAL] \ [--thickness THICKNESS] \ [--radius RADIUS] \ [--use-multi-frames] [--online] @@ -54,7 +56,7 @@ configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ --input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ --output-root vis_results \ ---rebase-keypoint-height +--rebase-keypoint-height --save-predictions ``` During 2D pose detection, for multi-frame inference that rely on extra frames to get the final results of the current frame, try this: diff --git a/demo/docs/zh_cn/3d_human_pose_demo.md b/demo/docs/zh_cn/3d_human_pose_demo.md new file mode 100644 
index 0000000000..6ed9dd67de --- /dev/null +++ b/demo/docs/zh_cn/3d_human_pose_demo.md @@ -0,0 +1 @@ +coming soon diff --git a/docs/en/index.rst b/docs/en/index.rst index 61bc1706b6..044b54be0f 100644 --- a/docs/en/index.rst +++ b/docs/en/index.rst @@ -51,6 +51,7 @@ You can change the documentation language at the lower-left corner of the page. model_zoo.txt model_zoo/body_2d_keypoint.md + model_zoo/body_3d_keypoint.md model_zoo/face_2d_keypoint.md model_zoo/hand_2d_keypoint.md model_zoo/wholebody_2d_keypoint.md diff --git a/docs/src/papers/algorithms/vitpose.md b/docs/src/papers/algorithms/vitpose.md index 3c74233dfa..dd218a5f98 100644 --- a/docs/src/papers/algorithms/vitpose.md +++ b/docs/src/papers/algorithms/vitpose.md @@ -8,7 +8,7 @@ ```bibtex @inproceedings{ xu2022vitpose, - title={Vi{TP}ose: Simple Vision Transformer Baselines for Human Pose Estimation}, + title={ViTPose: Simple Vision Transformer Baselines for Human Pose Estimation}, author={Yufei Xu and Jing Zhang and Qiming Zhang and Dacheng Tao}, booktitle={Advances in Neural Information Processing Systems}, year={2022}, diff --git a/docs/zh_cn/index.rst b/docs/zh_cn/index.rst index e38ed72df4..2431d82e4d 100644 --- a/docs/zh_cn/index.rst +++ b/docs/zh_cn/index.rst @@ -51,6 +51,7 @@ You can change the documentation language at the lower-left corner of the page. 
model_zoo.txt model_zoo/body_2d_keypoint.md + model_zoo/body_3d_keypoint.md model_zoo/face_2d_keypoint.md model_zoo/hand_2d_keypoint.md model_zoo/wholebody_2d_keypoint.md From 9aca147266fcc4fe50ad794538cc454573ad37e0 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 12 Jun 2023 10:18:29 +0800 Subject: [PATCH 18/52] [Fix] Fix RTMPose performance on coco-wholebody (#2447) --- .../wholebody_2d_keypoint/rtmpose/README.md | 6 +- ...ose-l_8xb32-270e_coco-wholebody-384x288.py | 2 + ...ose-l_8xb64-270e_coco-wholebody-256x192.py | 2 + ...ose-m_8xb64-270e_coco-wholebody-256x192.py | 2 + .../coco-wholebody/rtmpose_coco-wholebody.md | 6 +- .../coco-wholebody/rtmpose_coco-wholebody.yml | 60 +++++++++---------- projects/rtmpose/README.md | 14 ++--- projects/rtmpose/README_CN.md | 14 ++--- ...ose-l_8xb32-270e_coco-wholebody-384x288.py | 2 + ...ose-l_8xb64-270e_coco-wholebody-256x192.py | 2 + ...ose-m_8xb64-270e_coco-wholebody-256x192.py | 2 + 11 files changed, 56 insertions(+), 56 deletions(-) diff --git a/configs/wholebody_2d_keypoint/rtmpose/README.md b/configs/wholebody_2d_keypoint/rtmpose/README.md index 47e488567c..ac40c016aa 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/README.md +++ b/configs/wholebody_2d_keypoint/rtmpose/README.md @@ -13,6 +13,6 @@ Results on COCO-WholeBody v1.0 val with detector having human AP of 56.4 on COCO | Model | Input Size | Whole AP | Whole AR | Details and Download | | :-------: | :--------: | :------: | :------: | :---------------------------------------------------------------------: | -| RTMPose-m | 256x192 | 0.604 | 0.667 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | -| RTMPose-l | 256x192 | 0.632 | 0.694 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | -| RTMPose-l | 384x288 | 0.670 | 0.723 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | +| RTMPose-m | 256x192 | 0.582 | 0.674 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | +| 
RTMPose-l | 256x192 | 0.611 | 0.700 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | +| RTMPose-l | 384x288 | 0.648 | 0.730 | [rtmpose_coco-wholebody.md](./coco-wholebody/rtmpose_coco-wholebody.md) | diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py index 71715fe97a..af2c133f22 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -201,6 +201,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py index bbacb5deba..7765c9ec44 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -201,6 +201,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py index 880bfaf774..1e2afc518d 100644 --- 
a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py @@ -201,6 +201,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.md b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.md index bdf327d631..e43c0b3750 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.md +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.md @@ -57,6 +57,6 @@ Results on COCO-WholeBody v1.0 val with detector having human AP of 56.4 on COCO | Arch | Input Size | Body AP | Body AR | Foot AP | Foot AR | Face AP | Face AR | Hand AP | Hand AR | Whole AP | Whole AR | ckpt | log | | :-------------------------------------- | :--------: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :------: | :------: | :--------------------------------------: | :-------------------------------------: | -| [rtmpose-m](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 0.697 | 0.743 | 0.660 | 0.749 | 0.822 | 0.858 | 0.483 | 0.564 | 0.604 | 0.667 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.json) | -| [rtmpose-l](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 0.721 | 0.764 | 
0.693 | 0.780 | 0.844 | 0.876 | 0.523 | 0.600 | 0.632 | 0.694 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.json) | -| [rtmpose-l](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 0.736 | 0.776 | 0.738 | 0.810 | 0.895 | 0.918 | 0.591 | 0.659 | 0.670 | 0.723 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.json) | +| [rtmpose-m](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 0.673 | 0.750 | 0.615 | 0.752 | 0.813 | 0.871 | 0.475 | 0.589 | 0.582 | 0.674 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.json) | +| [rtmpose-l](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 0.695 | 0.769 | 0.658 | 0.785 | 0.833 | 0.887 | 0.519 | 0.628 | 0.611 | 0.700 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.json) | +| [rtmpose-l](/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 
384x288 | 0.712 | 0.781 | 0.693 | 0.811 | 0.882 | 0.919 | 0.579 | 0.677 | 0.648 | 0.730 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.json) | diff --git a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.yml b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.yml index 19a3cd0cec..049f348899 100644 --- a/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.yml +++ b/configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose_coco-wholebody.yml @@ -10,16 +10,16 @@ Models: Results: - Dataset: COCO-WholeBody Metrics: - Body AP: 0.697 - Body AR: 0.743 - Face AP: 0.822 - Face AR: 0.858 - Foot AP: 0.66 - Foot AR: 0.749 - Hand AP: 0.483 - Hand AR: 0.564 - Whole AP: 0.604 - Whole AR: 0.667 + Body AP: 0.673 + Body AR: 0.750 + Face AP: 0.813 + Face AR: 0.871 + Foot AP: 0.615 + Foot AR: 0.752 + Hand AP: 0.475 + Hand AR: 0.589 + Whole AP: 0.582 + Whole AR: 0.674 Task: Wholebody 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth - Config: configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -31,16 +31,16 @@ Models: Results: - Dataset: COCO-WholeBody Metrics: - Body AP: 0.721 - Body AR: 0.764 - Face AP: 0.844 - Face AR: 0.876 - Foot AP: 0.693 - Foot AR: 0.78 - Hand AP: 0.523 - Hand AR: 0.6 - Whole AP: 0.632 - Whole AR: 0.694 + Body AP: 0.695 + Body AR: 0.769 + Face AP: 0.833 + Face AR: 0.887 + Foot AP: 0.658 + Foot AR: 0.785 + Hand AP: 0.519 + Hand AR: 0.628 + Whole AP: 0.611 + Whole AR: 0.700 Task: Wholebody 2D Keypoint Weights: 
https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth - Config: configs/wholebody_2d_keypoint/rtmpose/coco-wholebody/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -52,15 +52,15 @@ Models: Results: - Dataset: COCO-WholeBody Metrics: - Body AP: 0.736 - Body AR: 0.776 - Face AP: 0.895 - Face AR: 0.918 - Foot AP: 0.738 - Foot AR: 0.81 - Hand AP: 0.591 - Hand AR: 0.659 - Whole AP: 0.67 - Whole AR: 0.723 + Body AP: 0.712 + Body AR: 0.781 + Face AP: 0.882 + Face AR: 0.919 + Foot AP: 0.693 + Foot AR: 0.811 + Hand AP: 0.579 + Hand AR: 0.677 + Whole AP: 0.648 + Whole AR: 0.730 Task: Wholebody 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index 11f02892bb..22c671cc6b 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -8,12 +8,6 @@
-[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/rtmpose-real-time-multi-person-pose/2d-human-pose-estimation-on-coco-wholebody-1)](https://paperswithcode.com/sota/2d-human-pose-estimation-on-coco-wholebody-1?p=rtmpose-real-time-multi-person-pose) - -
- -
- English | [简体中文](README_CN.md)
@@ -24,7 +18,7 @@ ______________________________________________________________________ Recent studies on 2D pose estimation have achieved excellent performance on public benchmarks, yet its application in the industrial community still suffers from heavy model parameters and high latency. In order to bridge this gap, we empirically study five aspects that affect the performance of multi-person pose estimation algorithms: paradigm, backbone network, localization algorithm, training strategy, and deployment inference, and present a high-performance real-time multi-person pose estimation framework, **RTMPose**, based on MMPose. -Our RTMPose-m achieves **75.8% AP** on COCO with **90+ FPS** on an Intel i7-11700 CPU and **430+ FPS** on an NVIDIA GTX 1660 Ti GPU, and RTMPose-l achieves **67.0% AP** on COCO-WholeBody with **130+ FPS**. +Our RTMPose-m achieves **75.8% AP** on COCO with **90+ FPS** on an Intel i7-11700 CPU and **430+ FPS** on an NVIDIA GTX 1660 Ti GPU. To further evaluate RTMPose's capability in critical real-time applications, we also report the performance after deploying on the mobile device. Our RTMPose-s achieves **72.2% AP** on COCO with **70+ FPS** on a Snapdragon 865 chip, outperforming existing open-source libraries. With the help of MMDeploy, our project supports various platforms like CPU, GPU, NVIDIA Jetson, and mobile devices and multiple inference backends such as ONNXRuntime, TensorRT, ncnn, etc. @@ -233,9 +227,9 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr | Config | Input Size | Whole AP | Whole AR | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | -| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 61.1 | 70.0 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 64.8 | 73.0 | 10.07 | 44.58 | 7.68 | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | ### Animal 2d (17 Keypoints) diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 0b64021872..57e00a95f2 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -8,12 +8,6 @@
-[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/rtmpose-real-time-multi-person-pose/2d-human-pose-estimation-on-coco-wholebody-1)](https://paperswithcode.com/sota/2d-human-pose-estimation-on-coco-wholebody-1?p=rtmpose-real-time-multi-person-pose) - -
- -
- [English](README.md) | 简体中文
@@ -22,7 +16,7 @@ ______________________________________________________________________ ## Abstract -近年来,2D 姿态估计的研究在公开数据集上取得了出色的成绩,但是它在工业界的应用仍然受到笨重的模型参数和高推理延迟的影响。为了让前沿姿态估计算法在工业界落地,我们通过实验研究了多人姿态估计算法的五个方面:范式、骨干网络、定位算法、训练策略和部署推理,基于 MMPose 提出了一个高性能的实时多人姿态估计框架 **RTMPose**。我们的 RTMPose-m 模型在 COCO 上取得 **75.8%AP**,在 Intel i7-11700 CPU 上达到 **90+FPS**,在 NVIDIA GTX 1660 Ti GPU 上达到 **430+FPS**,RTMPose-l 在 COCO-WholeBody 上达到 **67.0%AP**,**130+FPS**。我们同样验证了在算力有限的设备上做实时姿态估计,RTMPose-s 在移动端骁龙865芯片上可以达到 **COCO 72.2%AP**,**70+FPS**。在 MMDeploy 的帮助下,我们的项目支持 CPU、GPU、Jetson、移动端等多种部署环境。 +近年来,2D 姿态估计的研究在公开数据集上取得了出色的成绩,但是它在工业界的应用仍然受到笨重的模型参数和高推理延迟的影响。为了让前沿姿态估计算法在工业界落地,我们通过实验研究了多人姿态估计算法的五个方面:范式、骨干网络、定位算法、训练策略和部署推理,基于 MMPose 提出了一个高性能的实时多人姿态估计框架 **RTMPose**。我们的 RTMPose-m 模型在 COCO 上取得 **75.8%AP**,在 Intel i7-11700 CPU 上达到 **90+FPS**,在 NVIDIA GTX 1660 Ti GPU 上达到 **430+FPS**。我们同样验证了在算力有限的设备上做实时姿态估计,RTMPose-s 在移动端骁龙865芯片上可以达到 **COCO 72.2%AP**,**70+FPS**。在 MMDeploy 的帮助下,我们的项目支持 CPU、GPU、Jetson、移动端等多种部署环境。 ![rtmpose_intro](https://user-images.githubusercontent.com/13503330/219269619-935499e5-bdd9-49ea-8104-3c7796dbd862.png) @@ -224,9 +218,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | Config | Input Size | Whole AP | Whole AR | FLOPS
(G) | ORT-Latency
(ms)
(i7-11700) | TRT-FP16-Latency
(ms)
(GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | -| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 60.4 | 66.7 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 63.2 | 69.4 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | -| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 67.0 | 72.3 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 61.1 | 70.0 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | +| [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 64.8 | 73.0 | 10.07 | 44.58 | 7.68 | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | ### 动物 2d 关键点 (17 Keypoints) diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py index 19dc8f8d99..5fd8ce8e1e 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py @@ -202,6 +202,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py index 7903e09923..f4005028b6 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py @@ -202,6 +202,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py index 09c7a86840..d0096056a4 100644 --- a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py +++ 
b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py @@ -202,6 +202,8 @@ ann_file='annotations/coco_wholebody_val_v1.0.json', data_prefix=dict(img='val2017/'), test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', pipeline=val_pipeline, )) test_dataloader = val_dataloader From 9db981765c2b740d21556d3517db87b47916d9e2 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Mon, 12 Jun 2023 10:31:02 +0800 Subject: [PATCH 19/52] [Enhance] Add 3d pose inferencer (#2442) --- .../video_pose_lift/h36m/videopose3d_h36m.yml | 1 + demo/body3d_pose_lifter_demo.py | 89 +-- demo/docs/en/3d_human_pose_demo.md | 17 + demo/inferencer_demo.py | 35 +- docs/en/user_guides/inference.md | 8 +- mmpose/apis/__init__.py | 6 +- mmpose/apis/inference_3d.py | 94 +++- mmpose/apis/inferencers/__init__.py | 6 +- .../inferencers/base_mmpose_inferencer.py | 28 +- mmpose/apis/inferencers/mmpose_inferencer.py | 134 ++--- mmpose/apis/inferencers/pose2d_inferencer.py | 30 +- mmpose/apis/inferencers/pose3d_inferencer.py | 526 ++++++++++++++++++ model-index.yml | 1 + .../test_mmpose_inferencer.py | 36 +- .../test_pose3d_inferencer.py | 124 +++++ 15 files changed, 945 insertions(+), 190 deletions(-) create mode 100644 mmpose/apis/inferencers/pose3d_inferencer.py create mode 100644 tests/test_apis/test_inferencers/test_pose3d_inferencer.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml b/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml index 0703111b1b..3bce33af2d 100644 --- a/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml +++ b/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml @@ -61,6 +61,7 @@ Models: Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth - Config: 
configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py In Collection: VideoPose3D + Alias: human3d Metadata: Architecture: *id001 Training Data: Human3.6M diff --git a/demo/body3d_pose_lifter_demo.py b/demo/body3d_pose_lifter_demo.py index 39179d3448..f3f8eb58d2 100644 --- a/demo/body3d_pose_lifter_demo.py +++ b/demo/body3d_pose_lifter_demo.py @@ -13,8 +13,9 @@ from mmengine.structures import InstanceData from mmpose.apis import (_track_by_iou, _track_by_oks, collect_multi_frames, - extract_pose_sequence, inference_pose_lifter_model, - inference_topdown, init_model) + convert_keypoint_definition, extract_pose_sequence, + inference_pose_lifter_model, inference_topdown, + init_model) from mmpose.models.pose_estimators import PoseLifter from mmpose.models.pose_estimators.topdown import TopdownPoseEstimator from mmpose.registry import VISUALIZERS @@ -29,90 +30,6 @@ has_mmdet = False -def convert_keypoint_definition(keypoints, pose_det_dataset, - pose_lift_dataset): - """Convert pose det dataset keypoints definition to pose lifter dataset - keypoints definition, so that they are compatible with the definitions - required for 3D pose lifting. - - Args: - keypoints (ndarray[N, K, 2 or 3]): 2D keypoints to be transformed. - pose_det_dataset, (str): Name of the dataset for 2D pose detector. - pose_lift_dataset (str): Name of the dataset for pose lifter model. - - Returns: - ndarray[K, 2 or 3]: the transformed 2D keypoints. - """ - assert pose_lift_dataset in [ - 'Human36mDataset'], '`pose_lift_dataset` should be ' \ - f'`Human36mDataset`, but got {pose_lift_dataset}.' 
- - coco_style_datasets = [ - 'CocoDataset', 'PoseTrack18VideoDataset', 'PoseTrack18Dataset' - ] - keypoints_new = np.zeros((keypoints.shape[0], 17, keypoints.shape[2]), - dtype=keypoints.dtype) - if pose_lift_dataset == 'Human36mDataset': - if pose_det_dataset in ['Human36mDataset']: - keypoints_new = keypoints - elif pose_det_dataset in coco_style_datasets: - # pelvis (root) is in the middle of l_hip and r_hip - keypoints_new[:, 0] = (keypoints[:, 11] + keypoints[:, 12]) / 2 - # thorax is in the middle of l_shoulder and r_shoulder - keypoints_new[:, 8] = (keypoints[:, 5] + keypoints[:, 6]) / 2 - # spine is in the middle of thorax and pelvis - keypoints_new[:, - 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 - # in COCO, head is in the middle of l_eye and r_eye - # in PoseTrack18, head is in the middle of head_bottom and head_top - keypoints_new[:, 10] = (keypoints[:, 1] + keypoints[:, 2]) / 2 - # rearrange other keypoints - keypoints_new[:, [1, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 15, 16]] = \ - keypoints[:, [12, 14, 16, 11, 13, 15, 0, 5, 7, 9, 6, 8, 10]] - elif pose_det_dataset in ['AicDataset']: - # pelvis (root) is in the middle of l_hip and r_hip - keypoints_new[:, 0] = (keypoints[:, 9] + keypoints[:, 6]) / 2 - # thorax is in the middle of l_shoulder and r_shoulder - keypoints_new[:, 8] = (keypoints[:, 3] + keypoints[:, 0]) / 2 - # spine is in the middle of thorax and pelvis - keypoints_new[:, - 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 - # neck base (top end of neck) is 1/4 the way from - # neck (bottom end of neck) to head top - keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 - # head (spherical centre of head) is 7/12 the way from - # neck (bottom end of neck) to head top - keypoints_new[:, 10] = (5 * keypoints[:, 13] + - 7 * keypoints[:, 12]) / 12 - - keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ - keypoints[:, [6, 7, 8, 9, 10, 11, 3, 4, 5, 0, 1, 2]] - elif pose_det_dataset in ['CrowdPoseDataset']: - # 
pelvis (root) is in the middle of l_hip and r_hip - keypoints_new[:, 0] = (keypoints[:, 6] + keypoints[:, 7]) / 2 - # thorax is in the middle of l_shoulder and r_shoulder - keypoints_new[:, 8] = (keypoints[:, 0] + keypoints[:, 1]) / 2 - # spine is in the middle of thorax and pelvis - keypoints_new[:, - 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 - # neck base (top end of neck) is 1/4 the way from - # neck (bottom end of neck) to head top - keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 - # head (spherical centre of head) is 7/12 the way from - # neck (bottom end of neck) to head top - keypoints_new[:, 10] = (5 * keypoints[:, 13] + - 7 * keypoints[:, 12]) / 12 - - keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ - keypoints[:, [7, 9, 11, 6, 8, 10, 0, 2, 4, 1, 3, 5]] - else: - raise NotImplementedError( - f'unsupported conversion between {pose_lift_dataset} and ' - f'{pose_det_dataset}') - - return keypoints_new - - def parse_args(): parser = ArgumentParser() parser.add_argument('det_config', help='Config file for detection') diff --git a/demo/docs/en/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md index 8a6f1d206e..be0c84fdec 100644 --- a/demo/docs/en/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -74,3 +74,20 @@ https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_ --rebase-keypoint-height \ --use-multi-frames --online ``` + +### 3D Human Pose Demo with Inferencer + +The Inferencer provides a convenient interface for inference, allowing customization using model aliases instead of configuration files and checkpoint paths. 3D Inferencer only supports video path or webcam as input currently. Below is an example command: + +```shell +python demo/inferencer_demo.py {VIDEO_PATH} \ + --pose3d human3d --vis-out-dir vis_results/human3d +``` + +This command infers the video and saves the visualization results in the `vis_results/human3d` directory. 
+ +Image 1 + +The input video can be downloaded from [Google Drive](https://drive.google.com/file/d/10qEKW15P3-F8xOlAMav5se6fUQTShuT0/view?usp=sharing). + +In addition, the Inferencer supports saving predicted poses. For more information, please refer to the [inferencer document](https://mmpose.readthedocs.io/en/latest/user_guides/inference.html#inferencer-a-unified-inference-interface). diff --git a/demo/inferencer_demo.py b/demo/inferencer_demo.py index 73bd9c5bc3..d7bbbb5b52 100644 --- a/demo/inferencer_demo.py +++ b/demo/inferencer_demo.py @@ -25,6 +25,19 @@ def parse_args(): help='Path to the custom checkpoint file of the selected pose model. ' 'If it is not specified and "pose2d" is a model name of metafile, ' 'the weights will be loaded from metafile.') + parser.add_argument( + '--pose3d', + type=str, + default=None, + help='Pretrained 3D pose estimation algorithm. It\'s the path to the ' + 'config file or the model name defined in metafile.') + parser.add_argument( + '--pose3d-weights', + type=str, + default=None, + help='Path to the custom checkpoint file of the selected pose model. ' + 'If it is not specified and "pose3d" is a model name of metafile, ' + 'the weights will be loaded from metafile.') parser.add_argument( '--det-model', type=str, @@ -77,6 +90,26 @@ def parse_args(): help='IoU threshold for bounding box NMS') parser.add_argument( '--kpt-thr', type=float, default=0.3, help='Keypoint score threshold') + parser.add_argument( + '--tracking-thr', type=float, default=0.3, help='Tracking threshold') + parser.add_argument( + '--use-oks-tracking', + action='store_true', + help='Whether to use OKS as similarity in tracking') + parser.add_argument( + '--norm-pose-2d', + action='store_true', + help='Scale the bbox (along with the 2D pose) to the average bbox ' + 'scale of the dataset, and move the bbox (along with the 2D pose) to ' + 'the average bbox center of the dataset. 
This is useful when bbox ' + 'is small, especially in multi-person scenarios.') + parser.add_argument( + '--rebase-keypoint-height', + action='store_true', + help='Rebase the predicted 3D pose so its lowest keypoint has a ' + 'height of 0 (landing on the ground). This is useful for ' + 'visualization when the model do not predict the global position ' + 'of the 3D pose.') parser.add_argument( '--radius', type=int, @@ -106,7 +139,7 @@ def parse_args(): init_kws = [ 'pose2d', 'pose2d_weights', 'scope', 'device', 'det_model', - 'det_weights', 'det_cat_ids' + 'det_weights', 'det_cat_ids', 'pose3d', 'pose3d_weights' ] init_args = {} init_args['output_heatmaps'] = call_args.pop('draw_heatmap') diff --git a/docs/en/user_guides/inference.md b/docs/en/user_guides/inference.md index 737aa87106..cfb960be3b 100644 --- a/docs/en/user_guides/inference.md +++ b/docs/en/user_guides/inference.md @@ -196,7 +196,7 @@ The inferencer is designed to handle both visualization and saving of prediction ### Model Alias -MMPose provides a set of pre-defined aliases for commonly used models. These aliases can be used as shorthand when initializing the `MMPoseInferencer` instead of specifying the full model configuration name. Below is a list of the available model aliases and their corresponding configuration names: +The MMPose library has predefined aliases for several frequently used models. These aliases can be utilized as a shortcut when initializing the `MMPoseInferencer`, as an alternative to providing the full model configuration name. Here are the available 2D model aliases and their corresponding configuration names: | Alias | Configuration Name | Task | Pose Estimator | Detector | | --------- | -------------------------------------------------- | ------------------------------- | -------------- | ------------------- | @@ -211,6 +211,12 @@ MMPose provides a set of pre-defined aliases for commonly used models. 
These ali | vitpose-l | td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192 | Human pose estimation | ViTPose-large | RTMDet-m | | vitpose-h | td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192 | Human pose estimation | ViTPose-huge | RTMDet-m | +The following table lists the available 3D model aliases and their corresponding configuration names: + +| Alias | Configuration Name | Task | 3D Pose Estimator | 2D Pose Estimator | Detector | +| ------- | ------------------------------------------------------ | ------------------------ | ----------------- | ----------------- | -------- | +| human3d | vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m | Human 3D pose estimation | VideoPose3D | RTMPose-m | RTMDet-m | + In addition, users can utilize the CLI tool to display all available aliases with the following command: ```shell diff --git a/mmpose/apis/__init__.py b/mmpose/apis/__init__.py index dcce33742c..0c44f7a3f8 100644 --- a/mmpose/apis/__init__.py +++ b/mmpose/apis/__init__.py @@ -1,7 +1,8 @@ # Copyright (c) OpenMMLab. All rights reserved. 
from .inference import (collect_multi_frames, inference_bottomup, inference_topdown, init_model) -from .inference_3d import extract_pose_sequence, inference_pose_lifter_model +from .inference_3d import (collate_pose_sequence, convert_keypoint_definition, + extract_pose_sequence, inference_pose_lifter_model) from .inference_tracking import _compute_iou, _track_by_iou, _track_by_oks from .inferencers import MMPoseInferencer, Pose2DInferencer @@ -9,5 +10,6 @@ 'init_model', 'inference_topdown', 'inference_bottomup', 'collect_multi_frames', 'Pose2DInferencer', 'MMPoseInferencer', '_track_by_iou', '_track_by_oks', '_compute_iou', - 'inference_pose_lifter_model', 'extract_pose_sequence' + 'inference_pose_lifter_model', 'extract_pose_sequence', + 'convert_keypoint_definition', 'collate_pose_sequence' ] diff --git a/mmpose/apis/inference_3d.py b/mmpose/apis/inference_3d.py index 5fbc934adc..d5bb753945 100644 --- a/mmpose/apis/inference_3d.py +++ b/mmpose/apis/inference_3d.py @@ -8,6 +8,90 @@ from mmpose.structures import PoseDataSample +def convert_keypoint_definition(keypoints, pose_det_dataset, + pose_lift_dataset): + """Convert pose det dataset keypoints definition to pose lifter dataset + keypoints definition, so that they are compatible with the definitions + required for 3D pose lifting. + + Args: + keypoints (ndarray[N, K, 2 or 3]): 2D keypoints to be transformed. + pose_det_dataset, (str): Name of the dataset for 2D pose detector. + pose_lift_dataset (str): Name of the dataset for pose lifter model. + + Returns: + ndarray[K, 2 or 3]: the transformed 2D keypoints. + """ + assert pose_lift_dataset in [ + 'Human36mDataset'], '`pose_lift_dataset` should be ' \ + f'`Human36mDataset`, but got {pose_lift_dataset}.' 
+ + coco_style_datasets = [ + 'CocoDataset', 'PoseTrack18VideoDataset', 'PoseTrack18Dataset' + ] + keypoints_new = np.zeros((keypoints.shape[0], 17, keypoints.shape[2]), + dtype=keypoints.dtype) + if pose_lift_dataset == 'Human36mDataset': + if pose_det_dataset in ['Human36mDataset']: + keypoints_new = keypoints + elif pose_det_dataset in coco_style_datasets: + # pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 11] + keypoints[:, 12]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 5] + keypoints[:, 6]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # in COCO, head is in the middle of l_eye and r_eye + # in PoseTrack18, head is in the middle of head_bottom and head_top + keypoints_new[:, 10] = (keypoints[:, 1] + keypoints[:, 2]) / 2 + # rearrange other keypoints + keypoints_new[:, [1, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [12, 14, 16, 11, 13, 15, 0, 5, 7, 9, 6, 8, 10]] + elif pose_det_dataset in ['AicDataset']: + # pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 9] + keypoints[:, 6]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 3] + keypoints[:, 0]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # neck base (top end of neck) is 1/4 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 + # head (spherical centre of head) is 7/12 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 10] = (5 * keypoints[:, 13] + + 7 * keypoints[:, 12]) / 12 + + keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [6, 7, 8, 9, 10, 11, 3, 4, 5, 0, 1, 2]] + elif pose_det_dataset in ['CrowdPoseDataset']: + # 
pelvis (root) is in the middle of l_hip and r_hip + keypoints_new[:, 0] = (keypoints[:, 6] + keypoints[:, 7]) / 2 + # thorax is in the middle of l_shoulder and r_shoulder + keypoints_new[:, 8] = (keypoints[:, 0] + keypoints[:, 1]) / 2 + # spine is in the middle of thorax and pelvis + keypoints_new[:, + 7] = (keypoints_new[:, 0] + keypoints_new[:, 8]) / 2 + # neck base (top end of neck) is 1/4 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 9] = (3 * keypoints[:, 13] + keypoints[:, 12]) / 4 + # head (spherical centre of head) is 7/12 the way from + # neck (bottom end of neck) to head top + keypoints_new[:, 10] = (5 * keypoints[:, 13] + + 7 * keypoints[:, 12]) / 12 + + keypoints_new[:, [1, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16]] = \ + keypoints[:, [7, 9, 11, 6, 8, 10, 0, 2, 4, 1, 3, 5]] + else: + raise NotImplementedError( + f'unsupported conversion between {pose_lift_dataset} and ' + f'{pose_det_dataset}') + + return keypoints_new + + def extract_pose_sequence(pose_results, frame_idx, causal, seq_len, step=1): """Extract the target frame from 2D pose results, and pad the sequence to a fixed length. @@ -45,9 +129,9 @@ def extract_pose_sequence(pose_results, frame_idx, causal, seq_len, step=1): return pose_results_seq -def _collate_pose_sequence(pose_results_2d, - with_track_id=True, - target_frame=-1): +def collate_pose_sequence(pose_results_2d, + with_track_id=True, + target_frame=-1): """Reorganize multi-frame pose detection results into individual pose sequences. 
@@ -211,8 +295,8 @@ def inference_pose_lifter_model(model, pose_results_2d[i][j].pred_instances.keypoints = np.array( keypoints) - pose_sequences_2d = _collate_pose_sequence(pose_results_2d, with_track_id, - target_idx) + pose_sequences_2d = collate_pose_sequence(pose_results_2d, with_track_id, + target_idx) if not pose_sequences_2d: return [] diff --git a/mmpose/apis/inferencers/__init__.py b/mmpose/apis/inferencers/__init__.py index 3db192da73..5955d79da9 100644 --- a/mmpose/apis/inferencers/__init__.py +++ b/mmpose/apis/inferencers/__init__.py @@ -1,6 +1,10 @@ # Copyright (c) OpenMMLab. All rights reserved. from .mmpose_inferencer import MMPoseInferencer from .pose2d_inferencer import Pose2DInferencer +from .pose3d_inferencer import Pose3DInferencer from .utils import get_model_aliases -__all__ = ['Pose2DInferencer', 'MMPoseInferencer', 'get_model_aliases'] +__all__ = [ + 'Pose2DInferencer', 'MMPoseInferencer', 'get_model_aliases', + 'Pose3DInferencer' +] diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index bf41d821a0..f914793086 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -16,6 +16,7 @@ from mmengine.fileio import (get_file_backend, isdir, join_path, list_dir_or_file) from mmengine.infer.infer import BaseInferencer +from mmengine.registry import init_default_scope from mmengine.runner.checkpoint import _load_checkpoint_to_model from mmengine.structures import InstanceData from mmengine.utils import mkdir_or_exist @@ -184,8 +185,22 @@ def _get_webcam_inputs(self, inputs: str) -> Generator: # Set video input flag and metadata. 
self._video_input = True + (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.') + if int(major_ver) < 3: + fps = vcap.get(cv2.cv.CV_CAP_PROP_FPS) + width = vcap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) + height = vcap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) + else: + fps = vcap.get(cv2.CAP_PROP_FPS) + width = vcap.get(cv2.CAP_PROP_FRAME_WIDTH) + height = vcap.get(cv2.CAP_PROP_FRAME_HEIGHT) self.video_info = dict( - fps=10, name='webcam.mp4', writer=None, predictions=[]) + fps=fps, + name='webcam.mp4', + writer=None, + width=width, + height=height, + predictions=[]) def _webcam_reader() -> Generator: while True: @@ -215,6 +230,7 @@ def _init_pipeline(self, cfg: ConfigType) -> Callable: ``np.ndarray``. The returned pipeline will be used to process a single data. """ + init_default_scope(cfg.get('default_scope', 'mmpose')) return Compose(cfg.test_dataloader.dataset.pipeline) def preprocess(self, @@ -234,7 +250,7 @@ def preprocess(self, """ for i, input in enumerate(inputs): - bbox = bboxes[i] if bboxes is not None else [] + bbox = bboxes[i] if bboxes else [] data_infos = self.preprocess_single( input, index=i, bboxes=bbox, **kwargs) # only supports inference with batch size 1 @@ -402,8 +418,16 @@ def postprocess( if pred_out_dir != '': for pred, data_sample in zip(result_dict['predictions'], preds): if self._video_input: + # For video or webcam input, predictions for each frame + # are gathered in the 'predictions' key of 'video_info' + # dictionary. All frame predictions are then stored into + # a single file after processing all frames. self.video_info['predictions'].append(pred) else: + # For non-video inputs, predictions are stored in separate + # JSON files. The filename is determined by the basename + # of the input image path with a '.json' extension. The + # predictions are then dumped into this file. 
fname = os.path.splitext( os.path.basename( data_sample.metainfo['img_path']))[0] + '.json' diff --git a/mmpose/apis/inferencers/mmpose_inferencer.py b/mmpose/apis/inferencers/mmpose_inferencer.py index 3d5ac222bb..d7050272f6 100644 --- a/mmpose/apis/inferencers/mmpose_inferencer.py +++ b/mmpose/apis/inferencers/mmpose_inferencer.py @@ -7,11 +7,10 @@ from mmengine.config import Config, ConfigDict from mmengine.infer.infer import ModelType from mmengine.structures import InstanceData -from rich.progress import track -from mmpose.structures import PoseDataSample from .base_mmpose_inferencer import BaseMMPoseInferencer from .pose2d_inferencer import Pose2DInferencer +from .pose3d_inferencer import Pose3DInferencer InstanceList = List[InstanceData] InputType = Union[str, np.ndarray] @@ -55,8 +54,11 @@ class MMPoseInferencer(BaseMMPoseInferencer): config will be used. Default is None. """ - preprocess_kwargs: set = {'bbox_thr', 'nms_thr', 'bboxes'} - forward_kwargs: set = set() + preprocess_kwargs: set = { + 'bbox_thr', 'nms_thr', 'bboxes', 'use_oks_tracking', 'tracking_thr', + 'norm_pose_2d' + } + forward_kwargs: set = {'rebase_keypoint_height'} visualize_kwargs: set = { 'return_vis', 'show', @@ -72,6 +74,8 @@ class MMPoseInferencer(BaseMMPoseInferencer): def __init__(self, pose2d: Optional[str] = None, pose2d_weights: Optional[str] = None, + pose3d: Optional[str] = None, + pose3d_weights: Optional[str] = None, device: Optional[str] = None, scope: str = 'mmpose', det_model: Optional[Union[ModelType, str]] = None, @@ -79,15 +83,19 @@ def __init__(self, det_cat_ids: Optional[Union[int, List]] = None, output_heatmaps: Optional[bool] = None) -> None: - if pose2d is None: - raise ValueError('2d pose estimation algorithm should provided.') - self.visualizer = None - self.inferencers = dict() - if pose2d is not None: - self.inferencers['pose2d'] = Pose2DInferencer( - pose2d, pose2d_weights, device, scope, det_model, det_weights, - det_cat_ids, output_heatmaps) + if pose3d is 
not None: + self.inferencer = Pose3DInferencer(pose3d, pose3d_weights, pose2d, + pose2d_weights, device, scope, + det_model, det_weights, + det_cat_ids) + elif pose2d is not None: + self.inferencer = Pose2DInferencer(pose2d, pose2d_weights, device, + scope, det_model, det_weights, + det_cat_ids, output_heatmaps) + else: + raise ValueError('Either 2d or 3d pose estimation algorithm ' + 'should be provided.') def preprocess(self, inputs: InputsType, batch_size: int = 1, **kwargs): """Process the inputs into a model-feedable format. @@ -103,11 +111,9 @@ def preprocess(self, inputs: InputsType, batch_size: int = 1, **kwargs): for i, input in enumerate(inputs): data_batch = {} - if 'pose2d' in self.inferencers: - data_infos = self.inferencers['pose2d'].preprocess_single( - input, index=i, **kwargs) - data_batch['pose2d'] = self.inferencers['pose2d'].collate_fn( - data_infos) + data_infos = self.inferencer.preprocess_single( + input, index=i, **kwargs) + data_batch = self.inferencer.collate_fn(data_infos) # only supports inference with batch size 1 yield data_batch, [input] @@ -121,11 +127,7 @@ def forward(self, inputs: InputType, **forward_kwargs) -> PredType: Returns: Dict: The prediction results. Possibly with keys "pose2d". 
""" - result = {} - for mode, inferencer in self.inferencers.items(): - result[mode] = inferencer.forward(inputs[mode], **forward_kwargs) - - return result + return self.inferencer.forward(inputs, **forward_kwargs) def __call__( self, @@ -159,6 +161,15 @@ def __call__( kwargs['vis_out_dir'] = f'{out_dir}/visualizations' if 'pred_out_dir' not in kwargs: kwargs['pred_out_dir'] = f'{out_dir}/predictions' + + kwargs = { + key: value + for key, value in kwargs.items() + if key in set.union(self.inferencer.preprocess_kwargs, + self.inferencer.forward_kwargs, + self.inferencer.visualize_kwargs, + self.inferencer.postprocess_kwargs) + } ( preprocess_kwargs, forward_kwargs, @@ -168,29 +179,26 @@ def __call__( # preprocessing if isinstance(inputs, str) and inputs.startswith('webcam'): - inputs = self._get_webcam_inputs(inputs) + inputs = self.inferencer._get_webcam_inputs(inputs) batch_size = 1 if not visualize_kwargs.get('show', False): warnings.warn('The display mode is closed when using webcam ' 'input. 
It will be turned on automatically.') visualize_kwargs['show'] = True else: - inputs = self._inputs_to_list(inputs) + inputs = self.inferencer._inputs_to_list(inputs) + self._video_input = self.inferencer._video_input + if self._video_input: + self.video_info = self.inferencer.video_info inputs = self.preprocess( inputs, batch_size=batch_size, **preprocess_kwargs) # forward - forward_kwargs['bbox_thr'] = preprocess_kwargs.get('bbox_thr', -1) - for inferencer in self.inferencers.values(): - inferencer._video_input = self._video_input - if self._video_input: - inferencer.video_info = self.video_info + if 'bbox_thr' in self.inferencer.forward_kwargs: + forward_kwargs['bbox_thr'] = preprocess_kwargs.get('bbox_thr', -1) preds = [] - if 'pose2d' not in self.inferencers or not hasattr( - self.inferencers['pose2d'], 'detector'): - inputs = track(inputs, description='Inference') for proc_inputs, ori_inputs in inputs: preds = self.forward(proc_inputs, **forward_kwargs) @@ -227,55 +235,13 @@ def visualize(self, inputs: InputsType, preds: PredType, Returns: List[np.ndarray]: Visualization results. """ - - if 'pose2d' in self.inferencers: - window_name = '' - if self._video_input: - window_name = self.video_info['name'] - return self.inferencers['pose2d'].visualize( - inputs, - preds['pose2d'], - window_name=window_name, - window_close_event_handler=self._visualization_window_on_close, - **kwargs) - - def postprocess( - self, - preds: List[PoseDataSample], - visualization: List[np.ndarray], - return_datasample=False, - pred_out_dir: str = '', - ) -> dict: - """Process the predictions and visualization results from ``forward`` - and ``visualize``. - - This method should be responsible for the following tasks: - - 1. Convert datasamples into a json-serializable dict if needed. - 2. Pack the predictions and visualization results and return them. - 3. Dump or log the predictions. - - Args: - preds (List[Dict]): Predictions of the model. 
- visualization (np.ndarray): Visualized predictions. - return_datasample (bool): Whether to return results as - datasamples. Defaults to False. - pred_out_dir (str): Directory to save the inference results w/o - visualization. If left as empty, no file will be saved. - Defaults to ''. - - Returns: - dict: Inference and visualization results with key ``predictions`` - and ``visualization`` - - - ``visualization (Any)``: Returned by :meth:`visualize` - - ``predictions`` (dict or DataSample): Returned by - :meth:`forward` and processed in :meth:`postprocess`. - If ``return_datasample=False``, it usually should be a - json-serializable dict containing only basic data elements such - as strings and numbers. - """ - - if 'pose2d' in self.inferencers: - return super().postprocess(preds['pose2d'], visualization, - return_datasample, pred_out_dir) + window_name = '' + if self.inferencer._video_input: + window_name = self.inferencer.video_info['name'] + + return self.inferencer.visualize( + inputs, + preds, + window_name=window_name, + window_close_event_handler=self._visualization_window_on_close, + **kwargs) diff --git a/mmpose/apis/inferencers/pose2d_inferencer.py b/mmpose/apis/inferencers/pose2d_inferencer.py index b35abddb19..1e8e8d7550 100644 --- a/mmpose/apis/inferencers/pose2d_inferencer.py +++ b/mmpose/apis/inferencers/pose2d_inferencer.py @@ -11,7 +11,6 @@ from mmengine.model import revert_sync_batchnorm from mmengine.registry import init_default_scope from mmengine.structures import InstanceData -from rich.progress import track from mmpose.evaluation.functional import nms from mmpose.registry import DATASETS, INFERENCERS @@ -67,7 +66,7 @@ class Pose2DInferencer(BaseMMPoseInferencer): """ preprocess_kwargs: set = {'bbox_thr', 'nms_thr', 'bboxes'} - forward_kwargs: set = set() + forward_kwargs: set = {'merge_results'} visualize_kwargs: set = { 'return_vis', 'show', @@ -205,9 +204,28 @@ def preprocess_single(self, return data_infos @torch.no_grad() - def 
forward(self, inputs: Union[dict, tuple], bbox_thr=-1): - data_samples = super().forward(inputs) - if self.cfg.data_mode == 'topdown': + def forward(self, + inputs: Union[dict, tuple], + merge_results: bool = True, + bbox_thr: float = -1): + """Performs a forward pass through the model. + + Args: + inputs (Union[dict, tuple]): The input data to be processed. Can + be either a dictionary or a tuple. + merge_results (bool, optional): Whether to merge data samples, + default to True. This is only applicable when the data_mode + is 'topdown'. + bbox_thr (float, optional): A threshold for the bounding box + scores. Bounding boxes with scores greater than this value + will be retained. Default value is -1 which retains all + bounding boxes. + + Returns: + A list of data samples with prediction instances. + """ + data_samples = self.model.test_step(inputs) + if self.cfg.data_mode == 'topdown' and merge_results: data_samples = [merge_data_samples(data_samples)] if bbox_thr > 0: for ds in data_samples: @@ -272,8 +290,6 @@ def __call__( inputs, batch_size=batch_size, **preprocess_kwargs) preds = [] - if not hasattr(self, 'detector'): - inputs = track(inputs, description='Inference') for proc_inputs, ori_inputs in inputs: preds = self.forward(proc_inputs, **forward_kwargs) diff --git a/mmpose/apis/inferencers/pose3d_inferencer.py b/mmpose/apis/inferencers/pose3d_inferencer.py new file mode 100644 index 0000000000..d30302cfa2 --- /dev/null +++ b/mmpose/apis/inferencers/pose3d_inferencer.py @@ -0,0 +1,526 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import mimetypes +import os +import warnings +from collections import defaultdict +from functools import partial +from typing import (Callable, Dict, Iterable, List, Optional, Sequence, Tuple, + Union) + +import cv2 +import mmcv +import numpy as np +import torch +from mmengine.config import Config, ConfigDict +from mmengine.fileio import join_path +from mmengine.infer.infer import ModelType +from mmengine.model import revert_sync_batchnorm +from mmengine.registry import init_default_scope +from mmengine.structures import InstanceData +from mmengine.utils import mkdir_or_exist + +from mmpose.apis import (_track_by_iou, _track_by_oks, collate_pose_sequence, + convert_keypoint_definition, extract_pose_sequence) +from mmpose.registry import INFERENCERS +from mmpose.structures import PoseDataSample, merge_data_samples +from .base_mmpose_inferencer import BaseMMPoseInferencer +from .pose2d_inferencer import Pose2DInferencer + +InstanceList = List[InstanceData] +InputType = Union[str, np.ndarray] +InputsType = Union[InputType, Sequence[InputType]] +PredType = Union[InstanceData, InstanceList] +ImgType = Union[np.ndarray, Sequence[np.ndarray]] +ConfigType = Union[Config, ConfigDict] +ResType = Union[Dict, List[Dict], InstanceData, List[InstanceData]] + + +@INFERENCERS.register_module(name='pose-estimation-3d') +@INFERENCERS.register_module() +class Pose3DInferencer(BaseMMPoseInferencer): + """The inferencer for 3D pose estimation. + + Args: + model (str, optional): Pretrained 2D pose estimation algorithm. + It's the path to the config file or the model name defined in + metafile. For example, it could be: + + - model alias, e.g. ``'body'``, + - config name, e.g. ``'simcc_res50_8xb64-210e_coco-256x192'``, + - config path + + Defaults to ``None``. + weights (str, optional): Path to the checkpoint. If it is not + specified and "model" is a model name of metafile, the weights + will be loaded from metafile. Defaults to None. + device (str, optional): Device to run inference. 
If None, the + available device will be automatically used. Defaults to None. + scope (str, optional): The scope of the model. Defaults to "mmpose". + det_model (str, optional): Config path or alias of detection model. + Defaults to None. + det_weights (str, optional): Path to the checkpoints of detection + model. Defaults to None. + det_cat_ids (int or list[int], optional): Category id for + detection model. Defaults to None. + output_heatmaps (bool, optional): Flag to visualize predicted + heatmaps. If set to None, the default setting from the model + config will be used. Default is None. + """ + + preprocess_kwargs: set = { + 'bbox_thr', 'nms_thr', 'bboxes', 'use_oks_tracking', 'tracking_thr', + 'norm_pose_2d' + } + forward_kwargs: set = {'rebase_keypoint_height'} + visualize_kwargs: set = { + 'return_vis', + 'show', + 'wait_time', + 'draw_bbox', + 'radius', + 'thickness', + 'kpt_thr', + 'vis_out_dir', + } + postprocess_kwargs: set = {'pred_out_dir'} + + def __init__(self, + model: Union[ModelType, str], + weights: Optional[str] = None, + pose2d_model: Optional[Union[ModelType, str]] = None, + pose2d_weights: Optional[str] = None, + device: Optional[str] = None, + scope: Optional[str] = 'mmpose', + det_model: Optional[Union[ModelType, str]] = None, + det_weights: Optional[str] = None, + det_cat_ids: Optional[Union[int, Tuple]] = None) -> None: + + init_default_scope(scope) + super().__init__( + model=model, weights=weights, device=device, scope=scope) + self.model = revert_sync_batchnorm(self.model) + + # assign dataset metainfo to self.visualizer + self.visualizer.set_dataset_meta(self.model.dataset_meta) + + # initialize 2d pose estimator + self.pose2d_model = Pose2DInferencer( + pose2d_model if pose2d_model else 'human', pose2d_weights, device, + scope, det_model, det_weights, det_cat_ids) + + # helper functions + self._keypoint_converter = partial( + convert_keypoint_definition, + pose_det_dataset=self.pose2d_model.cfg.test_dataloader. 
+ dataset['type'], + pose_lift_dataset=self.cfg.test_dataloader.dataset['type'], + ) + + self._pose_seq_extractor = partial( + extract_pose_sequence, + causal=self.cfg.test_dataloader.dataset.get('causal', False), + seq_len=self.cfg.test_dataloader.dataset.get('seq_len', 1), + step=self.cfg.test_dataloader.dataset.get('seq_step', 1)) + + self._video_input = False + self._buffer = defaultdict(list) + + def _inputs_to_list(self, inputs: InputsType) -> Iterable: + """Preprocess the inputs to a listaccording to its type + Args: + inputs (InputsType): Inputs for the inferencer. + + Returns: + list: List of input for the :meth:`preprocess`. + """ + self._video_input = False + + if isinstance(inputs, str) and not os.path.isdir(inputs) and \ + mimetypes.guess_type(inputs)[0].split('/')[0] == 'video': + + self._video_input = True + video = mmcv.VideoReader(inputs) + self.video_info = dict( + fps=video.fps, + name=os.path.basename(inputs), + writer=None, + width=video.width, + height=video.height, + predictions=[]) + inputs = video + else: + raise ValueError(f'Pose 3d inferencer expects input to be a ' + f'video path, but received {inputs}.') + + return inputs + + def preprocess_single(self, + input: InputType, + index: int, + bbox_thr: float = 0.3, + nms_thr: float = 0.3, + bboxes: Union[List[List], List[np.ndarray], + np.ndarray] = [], + use_oks_tracking: bool = False, + tracking_thr: float = 0.3, + norm_pose_2d: bool = False): + """Process a single input into a model-feedable format. + + Args: + input (InputType): The input provided by the user. + index (int): The index of the input. + bbox_thr (float, optional): The threshold for bounding box + detection. Defaults to 0.3. + nms_thr (float, optional): The Intersection over Union (IoU) + threshold for bounding box Non-Maximum Suppression (NMS). + Defaults to 0.3. + bboxes (Union[List[List], List[np.ndarray], np.ndarray]): + The bounding boxes to use. Defaults to []. 
+ use_oks_tracking (bool, optional): A flag that indicates + whether OKS-based tracking should be used. Defaults to False. + tracking_thr (float, optional): The threshold for tracking. + Defaults to 0.3. + norm_pose_2d (bool, optional): A flag that indicates whether 2D + pose normalization should be used. Defaults to False. + + Yields: + Any: The data processed by the pipeline and collate_fn. + + This method first calculates 2D keypoints using the provided + pose2d_model. The method also performs instance matching, which + can use either OKS-based tracking or IOU-based tracking. + """ + + # calculate 2d keypoints + results_pose2d = next( + self.pose2d_model( + input, + bbox_thr=bbox_thr, + nms_thr=nms_thr, + bboxes=bboxes, + merge_results=False, + return_datasample=True))['predictions'] + + for ds in results_pose2d: + ds.pred_instances.set_field( + (ds.pred_instances.bboxes[..., 2:] - + ds.pred_instances.bboxes[..., :2]).prod(-1), 'areas') + + # instance matching + if use_oks_tracking: + _track = partial(_track_by_oks) + else: + _track = _track_by_iou + + for result in results_pose2d: + track_id, self._buffer['results_pose2d_last'], _ = _track( + result, self._buffer['results_pose2d_last'], tracking_thr) + if track_id == -1: + pred_instances = result.pred_instances.cpu().numpy() + keypoints = pred_instances.keypoints + if np.count_nonzero(keypoints[:, :, 1]) >= 3: + next_id = self._buffer.get('next_id', 0) + result.set_field(next_id, 'track_id') + self._buffer['next_id'] = next_id + 1 + else: + # If the number of keypoints detected is small, + # delete that person instance. 
+ result.pred_instances.keypoints[..., 1] = -10 + result.pred_instances.bboxes *= 0 + result.set_field(-1, 'track_id') + else: + result.set_field(track_id, 'track_id') + self._buffer['pose2d_results'] = merge_data_samples(results_pose2d) + + # convert keypoints + results_pose2d_converted = [ds.cpu().numpy() for ds in results_pose2d] + for ds in results_pose2d_converted: + ds.pred_instances.keypoints = self._keypoint_converter( + ds.pred_instances.keypoints) + self._buffer['pose_est_results_list'].append(results_pose2d_converted) + + # extract and pad input pose2d sequence + pose_results_2d = self._pose_seq_extractor( + self._buffer['pose_est_results_list'], frame_idx=index) + causal = self.cfg.test_dataloader.dataset.get('causal', False) + target_idx = -1 if causal else len(pose_results_2d) // 2 + + stats_info = self.model.dataset_meta.get('stats_info', {}) + bbox_center = stats_info.get('bbox_center', None) + bbox_scale = stats_info.get('bbox_scale', None) + + for i, pose_res in enumerate(pose_results_2d): + for j, data_sample in enumerate(pose_res): + kpts = data_sample.pred_instances.keypoints + bboxes = data_sample.pred_instances.bboxes + keypoints = [] + for k in range(len(kpts)): + kpt = kpts[k] + if norm_pose_2d: + bbox = bboxes[k] + center = np.array([[(bbox[0] + bbox[2]) / 2, + (bbox[1] + bbox[3]) / 2]]) + scale = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) + keypoints.append((kpt[:, :2] - center) / scale * + bbox_scale + bbox_center) + else: + keypoints.append(kpt[:, :2]) + pose_results_2d[i][j].pred_instances.keypoints = np.array( + keypoints) + pose_sequences_2d = collate_pose_sequence(pose_results_2d, True, + target_idx) + if not pose_sequences_2d: + return [] + + data_list = [] + for i, pose_seq in enumerate(pose_sequences_2d): + data_info = dict() + + keypoints_2d = pose_seq.pred_instances.keypoints + keypoints_2d = np.squeeze( + keypoints_2d, + axis=0) if keypoints_2d.ndim == 4 else keypoints_2d + + T, K, C = keypoints_2d.shape + + 
data_info['keypoints'] = keypoints_2d + data_info['keypoints_visible'] = np.ones(( + T, + K, + ), + dtype=np.float32) + data_info['lifting_target'] = np.zeros((K, 3), dtype=np.float32) + data_info['lifting_target_visible'] = np.ones((K, 1), + dtype=np.float32) + data_info['camera_param'] = dict( + w=self.video_info['width'], h=self.video_info['height']) + + data_info.update(self.model.dataset_meta) + data_list.append(self.pipeline(data_info)) + + return data_list + + @torch.no_grad() + def forward(self, + inputs: Union[dict, tuple], + rebase_keypoint_height: bool = False): + """Perform forward pass through the model and process the results. + + Args: + inputs (Union[dict, tuple]): The inputs for the model. + rebase_keypoint_height (bool, optional): Flag to rebase the + height of the keypoints (z-axis). Defaults to False. + + Returns: + list: A list of data samples, each containing the model's output + results. + """ + + pose_lift_results = self.model.test_step(inputs) + + # Post-processing of pose estimation results + pose_est_results_converted = self._buffer['pose_est_results_list'][-1] + for idx, pose_lift_res in enumerate(pose_lift_results): + # Update track_id from the pose estimation results + pose_lift_res.track_id = pose_est_results_converted[idx].get( + 'track_id', 1e4) + + # Invert x and z values of the keypoints + keypoints = pose_lift_res.pred_instances.keypoints + keypoints = keypoints[..., [0, 2, 1]] + keypoints[..., 0] = -keypoints[..., 0] + keypoints[..., 2] = -keypoints[..., 2] + + # If rebase_keypoint_height is True, adjust z-axis values + if rebase_keypoint_height: + keypoints[..., 2] -= np.min( + keypoints[..., 2], axis=-1, keepdims=True) + + pose_lift_results[idx].pred_instances.keypoints = keypoints + + pose_lift_results = sorted( + pose_lift_results, key=lambda x: x.get('track_id', 1e4)) + + data_samples = [merge_data_samples(pose_lift_results)] + return data_samples + + def __call__( + self, + inputs: InputsType, + return_datasample: bool = 
False, + batch_size: int = 1, + out_dir: Optional[str] = None, + **kwargs, + ) -> dict: + """Call the inferencer. + + Args: + inputs (InputsType): Inputs for the inferencer. + return_datasample (bool): Whether to return results as + :obj:`BaseDataElement`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + out_dir (str, optional): directory to save visualization + results and predictions. Will be overoden if vis_out_dir or + pred_out_dir are given. Defaults to None + **kwargs: Key words arguments passed to :meth:`preprocess`, + :meth:`forward`, :meth:`visualize` and :meth:`postprocess`. + Each key in kwargs should be in the corresponding set of + ``preprocess_kwargs``, ``forward_kwargs``, + ``visualize_kwargs`` and ``postprocess_kwargs``. + + Returns: + dict: Inference and visualization results. + """ + if out_dir is not None: + if 'vis_out_dir' not in kwargs: + kwargs['vis_out_dir'] = f'{out_dir}/visualizations' + if 'pred_out_dir' not in kwargs: + kwargs['pred_out_dir'] = f'{out_dir}/predictions' + + ( + preprocess_kwargs, + forward_kwargs, + visualize_kwargs, + postprocess_kwargs, + ) = self._dispatch_kwargs(**kwargs) + + # preprocessing + if isinstance(inputs, str) and inputs.startswith('webcam'): + inputs = self._get_webcam_inputs(inputs) + batch_size = 1 + if not visualize_kwargs.get('show', False): + warnings.warn('The display mode is closed when using webcam ' + 'input. 
It will be turned on automatically.') + visualize_kwargs['show'] = True + else: + inputs = self._inputs_to_list(inputs) + + inputs = self.preprocess( + inputs, batch_size=batch_size, **preprocess_kwargs) + + preds = [] + + for proc_inputs, ori_inputs in inputs: + preds = self.forward(proc_inputs, **forward_kwargs) + + visualization = self.visualize(ori_inputs, preds, + **visualize_kwargs) + results = self.postprocess(preds, visualization, return_datasample, + **postprocess_kwargs) + yield results + + if self._video_input: + self._finalize_video_processing( + postprocess_kwargs.get('pred_out_dir', '')) + self._buffer.clear() + + def visualize(self, + inputs: list, + preds: List[PoseDataSample], + return_vis: bool = False, + show: bool = False, + draw_bbox: bool = False, + wait_time: float = 0, + radius: int = 3, + thickness: int = 1, + kpt_thr: float = 0.3, + vis_out_dir: str = '', + window_name: str = '', + window_close_event_handler: Optional[Callable] = None + ) -> List[np.ndarray]: + """Visualize predictions. + + Args: + inputs (list): Inputs preprocessed by :meth:`_inputs_to_list`. + preds (Any): Predictions of the model. + return_vis (bool): Whether to return images with predicted results. + show (bool): Whether to display the image in a popup window. + Defaults to False. + wait_time (float): The interval of show (ms). Defaults to 0 + draw_bbox (bool): Whether to draw the bounding boxes. + Defaults to False + radius (int): Keypoint radius for visualization. Defaults to 3 + thickness (int): Link thickness for visualization. Defaults to 1 + kpt_thr (float): The threshold to visualize the keypoints. + Defaults to 0.3 + vis_out_dir (str, optional): Directory to save visualization + results w/o predictions. If left as empty, no file will + be saved. Defaults to ''. + window_name (str, optional): Title of display window. + window_close_event_handler (callable, optional): + + Returns: + List[np.ndarray]: Visualization results. 
+ """ + if (not return_vis) and (not show) and (not vis_out_dir): + return + + if getattr(self, 'visualizer', None) is None: + raise ValueError('Visualization needs the "visualizer" term' + 'defined in the config, but got None.') + + self.visualizer.radius = radius + self.visualizer.line_width = thickness + + results = [] + + for single_input, pred in zip(inputs, preds): + if isinstance(single_input, str): + img = mmcv.imread(single_input, channel_order='rgb') + elif isinstance(single_input, np.ndarray): + img = mmcv.bgr2rgb(single_input) + else: + raise ValueError('Unsupported input type: ' + f'{type(single_input)}') + + # since visualization and inference utilize the same process, + # the wait time is reduced when a video input is utilized, + # thereby eliminating the issue of inference getting stuck. + wait_time = 1e-5 if self._video_input else wait_time + + visualization = self.visualizer.add_datasample( + window_name, + img, + data_sample=pred, + det_data_sample=self._buffer['pose2d_results'], + draw_gt=False, + det_kpt_color=self.pose2d_model.visualizer.kpt_color, + det_dataset_skeleton=self.pose2d_model.visualizer.skeleton, + det_dataset_link_color=self.pose2d_model.visualizer.link_color, + draw_bbox=draw_bbox, + show=show, + wait_time=wait_time, + kpt_thr=kpt_thr) + results.append(visualization) + + if vis_out_dir: + out_img = mmcv.rgb2bgr(visualization) + _, file_extension = os.path.splitext(vis_out_dir) + if file_extension: + dir_name = os.path.dirname(vis_out_dir) + file_name = os.path.basename(vis_out_dir) + else: + dir_name = vis_out_dir + file_name = None + mkdir_or_exist(dir_name) + + if self._video_input: + + if self.video_info['writer'] is None: + fourcc = cv2.VideoWriter_fourcc(*'mp4v') + if file_name is None: + file_name = os.path.basename( + self.video_info['name']) + out_file = join_path(dir_name, file_name) + self.video_info['writer'] = cv2.VideoWriter( + out_file, fourcc, self.video_info['fps'], + (visualization.shape[1], 
visualization.shape[0])) + self.video_info['writer'].write(out_img) + + else: + assert False + + if return_vis: + return results + else: + return [] diff --git a/model-index.yml b/model-index.yml index 33fb73aa4a..1965917479 100644 --- a/model-index.yml +++ b/model-index.yml @@ -74,6 +74,7 @@ Import: - configs/body_2d_keypoint/topdown_regression/coco/mobilenetv2_rle_coco.yml - configs/body_2d_keypoint/topdown_regression/mpii/resnet_mpii.yml - configs/body_2d_keypoint/topdown_regression/mpii/resnet_rle_mpii.yml +- configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml - configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose_coco_wholebody_face.yml - configs/face_2d_keypoint/rtmpose/wflw/rtmpose_wflw.yml - configs/face_2d_keypoint/topdown_heatmap/300w/hrnetv2_300w.yml diff --git a/tests/test_apis/test_inferencers/test_mmpose_inferencer.py b/tests/test_apis/test_inferencers/test_mmpose_inferencer.py index af48bc2129..f679df27b6 100644 --- a/tests/test_apis/test_inferencers/test_mmpose_inferencer.py +++ b/tests/test_apis/test_inferencers/test_mmpose_inferencer.py @@ -15,7 +15,7 @@ class TestMMPoseInferencer(TestCase): - def test_call(self): + def test_pose2d_call(self): try: from mmdet.apis.det_inferencer import DetInferencer # noqa: F401 except (ImportError, ModuleNotFoundError): @@ -88,3 +88,37 @@ def test_call(self): os.listdir(f'{tmp_dir}/predictions')) self.assertTrue(inferencer._video_input) self.assertIn(len(results['predictions']), (4, 5)) + + def test_pose3d_call(self): + try: + from mmdet.apis.det_inferencer import DetInferencer # noqa: F401 + except (ImportError, ModuleNotFoundError): + return unittest.skip('mmdet is not installed') + + # top-down model + if platform.system().lower() == 'windows': + # the default human pose estimator utilizes rtmdet-m detector + # through alias, which seems not compatible with windows + det_model = 'demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py' + det_weights = 
'https://download.openmmlab.com/mmdetection/v2.0/' \ + 'faster_rcnn/faster_rcnn_r50_fpn_1x_coco/' \ + 'faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' + else: + det_model, det_weights = None, None + inferencer = MMPoseInferencer( + pose3d='human3d', det_model=det_model, det_weights=det_weights) + + # `inputs` is path to a video + inputs = 'https://user-images.githubusercontent.com/87690686/' \ + '164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4' + with TemporaryDirectory() as tmp_dir: + results = defaultdict(list) + for res in inferencer(inputs, out_dir=tmp_dir): + for key in res: + results[key].extend(res[key]) + self.assertIn('164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4', + os.listdir(f'{tmp_dir}/visualizations')) + self.assertIn( + '164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.json', + os.listdir(f'{tmp_dir}/predictions')) + self.assertTrue(inferencer._video_input) diff --git a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py new file mode 100644 index 0000000000..356b38dddc --- /dev/null +++ b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py @@ -0,0 +1,124 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import os +import os.path as osp +import platform +import unittest +from collections import defaultdict +from tempfile import TemporaryDirectory +from unittest import TestCase + +import mmcv +import torch + +from mmpose.apis.inferencers import Pose2DInferencer, Pose3DInferencer + + +class TestPose3DInferencer(TestCase): + + def _get_det_model_weights(self): + if platform.system().lower() == 'windows': + # the default human/animal pose estimator utilizes rtmdet-m + # detector through alias, which seems not compatible with windows + det_model = 'demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py' + det_weights = 'https://download.openmmlab.com/mmdetection/v2.0/' \ + 'faster_rcnn/faster_rcnn_r50_fpn_1x_coco/' \ + 'faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' + else: + det_model, det_weights = None, None + + return det_model, det_weights + + def test_init(self): + + try: + from mmdet.apis.det_inferencer import DetInferencer # noqa: F401 + except (ImportError, ModuleNotFoundError): + return unittest.skip('mmdet is not installed') + + det_model, det_weights = self._get_det_model_weights() + + # 1. init with config path and checkpoint + inferencer = Pose3DInferencer( + model= # noqa + 'configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py', # noqa + weights= # noqa + 'https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth', # noqa + pose2d_model='configs/body_2d_keypoint/simcc/coco/' + 'simcc_res50_8xb64-210e_coco-256x192.py', + pose2d_weights='https://download.openmmlab.com/mmpose/' + 'v1/body_2d_keypoint/simcc/coco/' + 'simcc_res50_8xb64-210e_coco-256x192-8e0f5b59_20220919.pth', + det_model=det_model, + det_weights=det_weights, + det_cat_ids=0 if det_model else None) + self.assertIsInstance(inferencer.model, torch.nn.Module) + self.assertIsInstance(inferencer.pose2d_model, Pose2DInferencer) + + # 2. 
init with config name + inferencer = Pose3DInferencer( + model='configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_' + 'videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py', + pose2d_model='configs/body_2d_keypoint/simcc/coco/' + 'simcc_res50_8xb64-210e_coco-256x192.py', + det_model=det_model, + det_weights=det_weights, + det_cat_ids=0 if det_model else None) + self.assertIsInstance(inferencer.model, torch.nn.Module) + self.assertIsInstance(inferencer.pose2d_model, Pose2DInferencer) + + # 3. init with alias + inferencer = Pose3DInferencer( + model='human3d', + det_model=det_model, + det_weights=det_weights, + det_cat_ids=0 if det_model else None) + self.assertIsInstance(inferencer.model, torch.nn.Module) + self.assertIsInstance(inferencer.pose2d_model, Pose2DInferencer) + + def test_call(self): + + try: + from mmdet.apis.det_inferencer import DetInferencer # noqa: F401 + except (ImportError, ModuleNotFoundError): + return unittest.skip('mmdet is not installed') + + # top-down model + det_model, det_weights = self._get_det_model_weights() + inferencer = Pose3DInferencer( + model='human3d', + det_model=det_model, + det_weights=det_weights, + det_cat_ids=0 if det_model else None) + + img_path = 'tests/data/coco/000000197388.jpg' + img = mmcv.imread(img_path) + + # `inputs` is path to an image + inputs = img_path + with self.assertRaises(ValueError): + results = next(inferencer(inputs, return_vis=True)) + + # `inputs` is an image array + inputs = img + with self.assertRaises(ValueError): + results = next(inferencer(inputs)) + + # `inputs` is path to a directory + inputs = osp.dirname(img_path) + with self.assertRaises(ValueError): + results = next(inferencer(inputs)) + + # `inputs` is path to a video + inputs = 'https://user-images.githubusercontent.com/87690686/' \ + '164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4' + with TemporaryDirectory() as tmp_dir: + results = defaultdict(list) + for res in inferencer(inputs, out_dir=tmp_dir): + for key in res: + 
results[key].extend(res[key]) + self.assertIn('164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4', + os.listdir(f'{tmp_dir}/visualizations')) + self.assertIn( + '164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.json', + os.listdir(f'{tmp_dir}/predictions')) + self.assertTrue(inferencer._video_input) From d1621e9d5920f631622a2e437fcb75a390101788 Mon Sep 17 00:00:00 2001 From: Xuan Ju <89566272+juxuan27@users.noreply.github.com> Date: Mon, 12 Jun 2023 10:50:23 +0800 Subject: [PATCH 20/52] [Feature] Support Human-Art Dataset (#2304) --- README.md | 1 + README_CN.md | 1 + configs/_base_/datasets/humanart.py | 181 +++++ configs/_base_/datasets/humanart_aic.py | 205 +++++ configs/body_2d_keypoint/rtmpose/README.md | 18 + .../rtmpose-l_8xb256-420e_humanart-256x192.py | 232 ++++++ .../rtmpose-m_8xb256-420e_humanart-256x192.py | 232 ++++++ .../rtmpose-s_8xb256-420e_humanart-256x192.py | 232 ++++++ .../rtmpose/humanart/rtmpose_humanart.md | 110 +++ .../rtmpose/humanart/rtmpose_humanart.yml | 106 +++ .../topdown_heatmap/README.md | 16 + ...iTPose-base_8xb64-210e_humanart-256x192.py | 150 ++++ ...TPose-small_8xb64-210e_humanart-256x192.py | 155 ++++ .../humanart/vitpose_humanart.md | 85 +++ .../humanart/vitpose_humanart.yml | 79 ++ docs/en/dataset_zoo/2d_body_keypoint.md | 52 ++ docs/zh_cn/dataset_zoo/2d_body_keypoint.md | 52 ++ mmpose/datasets/datasets/body/__init__.py | 3 +- .../datasets/body/humanart_dataset.py | 73 ++ .../digital_art/000000001648.jpg | Bin 0 -> 1024164 bytes .../garage_kits/000000005603.jpg | Bin 0 -> 225722 bytes .../real_human/acrobatics/000000000590.jpg | Bin 0 -> 76762 bytes tests/data/humanart/test_humanart.json | 716 ++++++++++++++++++ .../humanart/test_humanart_det_AP_H_56.json | 145 ++++ .../test_humanart_dataset.py | 160 ++++ tools/dist_train.sh | 0 26 files changed, 3003 insertions(+), 1 deletion(-) create mode 100644 configs/_base_/datasets/humanart.py create mode 100644 configs/_base_/datasets/humanart_aic.py create mode 100644 
configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md create mode 100644 configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml create mode 100644 mmpose/datasets/datasets/body/humanart_dataset.py create mode 100644 tests/data/humanart/2D_virtual_human/digital_art/000000001648.jpg create mode 100644 tests/data/humanart/3D_virtual_human/garage_kits/000000005603.jpg create mode 100644 tests/data/humanart/real_human/acrobatics/000000000590.jpg create mode 100644 tests/data/humanart/test_humanart.json create mode 100644 tests/data/humanart/test_humanart_det_AP_H_56.json create mode 100644 tests/test_datasets/test_datasets/test_body_datasets/test_humanart_dataset.py mode change 100644 => 100755 tools/dist_train.sh diff --git a/README.md b/README.md index 749b75e307..2b78649703 100644 --- a/README.md +++ b/README.md @@ -284,6 +284,7 @@ A summary can be found in the [Model Zoo](https://mmpose.readthedocs.io/en/lates - [x] [InterHand2.6M](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#interhand2-6m-eccv-2020) \[[homepage](https://mks0601.github.io/InterHand2.6M/)\] (ECCV'2020) - [x] [AP-10K](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#ap-10k-neurips-2021) 
\[[homepage](https://github.com/AlexTheBad/AP-10K)\] (NeurIPS'2021) - [x] [Horse-10](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#horse-10-wacv-2021) \[[homepage](http://www.mackenziemathislab.org/horse10)\] (WACV'2021) +- [x] [Human-Art](#todo) \[[homepage](https://idea-research.github.io/HumanArt/)\] (CVPR'2023)
diff --git a/README_CN.md b/README_CN.md index 5e48568001..f74b3adb76 100644 --- a/README_CN.md +++ b/README_CN.md @@ -282,6 +282,7 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的 - [x] [InterHand2.6M](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#interhand2-6m-eccv-2020) \[[主页](https://mks0601.github.io/InterHand2.6M/)\] (ECCV'2020) - [x] [AP-10K](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#ap-10k-neurips-2021) \[[主页](https://github.com/AlexTheBad/AP-10K)\] (NeurIPS'2021) - [x] [Horse-10](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#horse-10-wacv-2021) \[[主页](http://www.mackenziemathislab.org/horse10)\] (WACV'2021) +- [x] [Human-Art](#todo) \[[homepage](https://idea-research.github.io/HumanArt/)\] (CVPR'2023) diff --git a/configs/_base_/datasets/humanart.py b/configs/_base_/datasets/humanart.py new file mode 100644 index 0000000000..b549269b69 --- /dev/null +++ b/configs/_base_/datasets/humanart.py @@ -0,0 +1,181 @@ +dataset_info = dict( + dataset_name='Human-Art', + paper_info=dict( + author='Ju, Xuan and Zeng, Ailing and ' + 'Wang, Jianan and Xu, Qiang and Zhang, Lei', + title='Human-Art: A Versatile Human-Centric Dataset ' + 'Bridging Natural and Artificial Scenes', + container='Proceedings of the IEEE/CVF Conference on ' + 'Computer Vision and Pattern Recognition', + year='2023', + homepage='https://idea-research.github.io/HumanArt/', + ), + keypoint_info={ + 0: + dict(name='nose', id=0, color=[51, 153, 255], type='upper', swap=''), + 1: + dict( + name='left_eye', + id=1, + color=[51, 153, 255], + type='upper', + swap='right_eye'), + 2: + dict( + name='right_eye', + id=2, + color=[51, 153, 255], + type='upper', + swap='left_eye'), + 3: + dict( + name='left_ear', + id=3, + color=[51, 153, 255], + type='upper', + swap='right_ear'), + 4: + dict( + name='right_ear', + id=4, + color=[51, 153, 255], + type='upper', + swap='left_ear'), + 5: + dict( + name='left_shoulder', + id=5, + 
color=[0, 255, 0], + type='upper', + swap='right_shoulder'), + 6: + dict( + name='right_shoulder', + id=6, + color=[255, 128, 0], + type='upper', + swap='left_shoulder'), + 7: + dict( + name='left_elbow', + id=7, + color=[0, 255, 0], + type='upper', + swap='right_elbow'), + 8: + dict( + name='right_elbow', + id=8, + color=[255, 128, 0], + type='upper', + swap='left_elbow'), + 9: + dict( + name='left_wrist', + id=9, + color=[0, 255, 0], + type='upper', + swap='right_wrist'), + 10: + dict( + name='right_wrist', + id=10, + color=[255, 128, 0], + type='upper', + swap='left_wrist'), + 11: + dict( + name='left_hip', + id=11, + color=[0, 255, 0], + type='lower', + swap='right_hip'), + 12: + dict( + name='right_hip', + id=12, + color=[255, 128, 0], + type='lower', + swap='left_hip'), + 13: + dict( + name='left_knee', + id=13, + color=[0, 255, 0], + type='lower', + swap='right_knee'), + 14: + dict( + name='right_knee', + id=14, + color=[255, 128, 0], + type='lower', + swap='left_knee'), + 15: + dict( + name='left_ankle', + id=15, + color=[0, 255, 0], + type='lower', + swap='right_ankle'), + 16: + dict( + name='right_ankle', + id=16, + color=[255, 128, 0], + type='lower', + swap='left_ankle') + }, + skeleton_info={ + 0: + dict(link=('left_ankle', 'left_knee'), id=0, color=[0, 255, 0]), + 1: + dict(link=('left_knee', 'left_hip'), id=1, color=[0, 255, 0]), + 2: + dict(link=('right_ankle', 'right_knee'), id=2, color=[255, 128, 0]), + 3: + dict(link=('right_knee', 'right_hip'), id=3, color=[255, 128, 0]), + 4: + dict(link=('left_hip', 'right_hip'), id=4, color=[51, 153, 255]), + 5: + dict(link=('left_shoulder', 'left_hip'), id=5, color=[51, 153, 255]), + 6: + dict(link=('right_shoulder', 'right_hip'), id=6, color=[51, 153, 255]), + 7: + dict( + link=('left_shoulder', 'right_shoulder'), + id=7, + color=[51, 153, 255]), + 8: + dict(link=('left_shoulder', 'left_elbow'), id=8, color=[0, 255, 0]), + 9: + dict( + link=('right_shoulder', 'right_elbow'), id=9, color=[255, 128, 0]), + 
10: + dict(link=('left_elbow', 'left_wrist'), id=10, color=[0, 255, 0]), + 11: + dict(link=('right_elbow', 'right_wrist'), id=11, color=[255, 128, 0]), + 12: + dict(link=('left_eye', 'right_eye'), id=12, color=[51, 153, 255]), + 13: + dict(link=('nose', 'left_eye'), id=13, color=[51, 153, 255]), + 14: + dict(link=('nose', 'right_eye'), id=14, color=[51, 153, 255]), + 15: + dict(link=('left_eye', 'left_ear'), id=15, color=[51, 153, 255]), + 16: + dict(link=('right_eye', 'right_ear'), id=16, color=[51, 153, 255]), + 17: + dict(link=('left_ear', 'left_shoulder'), id=17, color=[51, 153, 255]), + 18: + dict( + link=('right_ear', 'right_shoulder'), id=18, color=[51, 153, 255]) + }, + joint_weights=[ + 1., 1., 1., 1., 1., 1., 1., 1.2, 1.2, 1.5, 1.5, 1., 1., 1.2, 1.2, 1.5, + 1.5 + ], + sigmas=[ + 0.026, 0.025, 0.025, 0.035, 0.035, 0.079, 0.079, 0.072, 0.072, 0.062, + 0.062, 0.107, 0.107, 0.087, 0.087, 0.089, 0.089 + ]) diff --git a/configs/_base_/datasets/humanart_aic.py b/configs/_base_/datasets/humanart_aic.py new file mode 100644 index 0000000000..e999427536 --- /dev/null +++ b/configs/_base_/datasets/humanart_aic.py @@ -0,0 +1,205 @@ +dataset_info = dict( + dataset_name='humanart', + paper_info=[ + dict( + author='Ju, Xuan and Zeng, Ailing and ' + 'Wang, Jianan and Xu, Qiang and Zhang, ' + 'Lei', + title='Human-Art: A Versatile Human-Centric Dataset ' + 'Bridging Natural and Artificial Scenes', + container='CVPR', + year='2023', + homepage='https://idea-research.github.io/HumanArt/', + ), + dict( + author='Wu, Jiahong and Zheng, He and Zhao, Bo and ' + 'Li, Yixin and Yan, Baoming and Liang, Rui and ' + 'Wang, Wenjia and Zhou, Shipei and Lin, Guosen and ' + 'Fu, Yanwei and others', + title='Ai challenger: A large-scale dataset for going ' + 'deeper in image understanding', + container='arXiv', + year='2017', + homepage='https://github.com/AIChallenger/AI_Challenger_2017', + ), + ], + keypoint_info={ + 0: + dict(name='nose', id=0, color=[51, 153, 255], type='upper', 
swap=''), + 1: + dict( + name='left_eye', + id=1, + color=[51, 153, 255], + type='upper', + swap='right_eye'), + 2: + dict( + name='right_eye', + id=2, + color=[51, 153, 255], + type='upper', + swap='left_eye'), + 3: + dict( + name='left_ear', + id=3, + color=[51, 153, 255], + type='upper', + swap='right_ear'), + 4: + dict( + name='right_ear', + id=4, + color=[51, 153, 255], + type='upper', + swap='left_ear'), + 5: + dict( + name='left_shoulder', + id=5, + color=[0, 255, 0], + type='upper', + swap='right_shoulder'), + 6: + dict( + name='right_shoulder', + id=6, + color=[255, 128, 0], + type='upper', + swap='left_shoulder'), + 7: + dict( + name='left_elbow', + id=7, + color=[0, 255, 0], + type='upper', + swap='right_elbow'), + 8: + dict( + name='right_elbow', + id=8, + color=[255, 128, 0], + type='upper', + swap='left_elbow'), + 9: + dict( + name='left_wrist', + id=9, + color=[0, 255, 0], + type='upper', + swap='right_wrist'), + 10: + dict( + name='right_wrist', + id=10, + color=[255, 128, 0], + type='upper', + swap='left_wrist'), + 11: + dict( + name='left_hip', + id=11, + color=[0, 255, 0], + type='lower', + swap='right_hip'), + 12: + dict( + name='right_hip', + id=12, + color=[255, 128, 0], + type='lower', + swap='left_hip'), + 13: + dict( + name='left_knee', + id=13, + color=[0, 255, 0], + type='lower', + swap='right_knee'), + 14: + dict( + name='right_knee', + id=14, + color=[255, 128, 0], + type='lower', + swap='left_knee'), + 15: + dict( + name='left_ankle', + id=15, + color=[0, 255, 0], + type='lower', + swap='right_ankle'), + 16: + dict( + name='right_ankle', + id=16, + color=[255, 128, 0], + type='lower', + swap='left_ankle'), + 17: + dict( + name='head_top', + id=17, + color=[51, 153, 255], + type='upper', + swap=''), + 18: + dict(name='neck', id=18, color=[51, 153, 255], type='upper', swap='') + }, + skeleton_info={ + 0: + dict(link=('left_ankle', 'left_knee'), id=0, color=[0, 255, 0]), + 1: + dict(link=('left_knee', 'left_hip'), id=1, color=[0, 255, 
 0]), + 2: + dict(link=('right_ankle', 'right_knee'), id=2, color=[255, 128, 0]), + 3: + dict(link=('right_knee', 'right_hip'), id=3, color=[255, 128, 0]), + 4: + dict(link=('left_hip', 'right_hip'), id=4, color=[51, 153, 255]), + 5: + dict(link=('left_shoulder', 'left_hip'), id=5, color=[51, 153, 255]), + 6: + dict(link=('right_shoulder', 'right_hip'), id=6, color=[51, 153, 255]), + 7: + dict( + link=('left_shoulder', 'right_shoulder'), + id=7, + color=[51, 153, 255]), + 8: + dict(link=('left_shoulder', 'left_elbow'), id=8, color=[0, 255, 0]), + 9: + dict( + link=('right_shoulder', 'right_elbow'), id=9, color=[255, 128, 0]), + 10: + dict(link=('left_elbow', 'left_wrist'), id=10, color=[0, 255, 0]), + 11: + dict(link=('right_elbow', 'right_wrist'), id=11, color=[255, 128, 0]), + 12: + dict(link=('left_eye', 'right_eye'), id=12, color=[51, 153, 255]), + 13: + dict(link=('nose', 'left_eye'), id=13, color=[51, 153, 255]), + 14: + dict(link=('nose', 'right_eye'), id=14, color=[51, 153, 255]), + 15: + dict(link=('left_eye', 'left_ear'), id=15, color=[51, 153, 255]), + 16: + dict(link=('right_eye', 'right_ear'), id=16, color=[51, 153, 255]), + 17: + dict(link=('left_ear', 'left_shoulder'), id=17, color=[51, 153, 255]), + 18: + dict( + link=('right_ear', 'right_shoulder'), id=18, color=[51, 153, 255]), + 19: + dict(link=('head_top', 'neck'), id=19, color=[51, 153, 255]), + }, + joint_weights=[ + 1., 1., 1., 1., 1., 1., 1., 1.2, 1.2, 1.5, 1.5, 1., 1., 1.2, 1.2, 1.5, + 1.5, 1.5, 1.5 + ], + sigmas=[ + 0.026, 0.025, 0.025, 0.035, 0.035, 0.079, 0.079, 0.072, 0.072, 0.062, + 0.062, 0.107, 0.107, 0.087, 0.087, 0.089, 0.089, 0.026, 0.026 + ]) diff --git a/configs/body_2d_keypoint/rtmpose/README.md b/configs/body_2d_keypoint/rtmpose/README.md index 3037974917..38fd938376 100644 --- a/configs/body_2d_keypoint/rtmpose/README.md +++ b/configs/body_2d_keypoint/rtmpose/README.md @@ -37,3 +37,21 @@ Results on CrowdPose test with [YOLOv3](https://github.com/eriklindernoren/PyTor | Model | 
Input Size | AP | AR | Details and Download | | :-------: | :--------: | :---: | :---: | :------------------------------------------------------: | | RTMPose-m | 256x192 | 0.706 | 0.788 | [rtmpose_crowdpose.md](./crowdpose/rtmpose_crowdpose.md) | + +### Human-Art Dataset + +Results on Human-Art validation dataset with detector having human AP of 56.2 on Human-Art validation dataset + +| Model | Input Size | AP | AR | Details and Download | +| :-------: | :--------: | :---: | :---: | :---------------------------------------------------: | +| RTMPose-s | 256x192 | 0.311 | 0.381 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | +| RTMPose-m | 256x192 | 0.355 | 0.417 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | +| RTMPose-l | 256x192 | 0.378 | 0.442 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | + +Results on Human-Art validation dataset with ground-truth bounding-box + +| Model | Input Size | AP | AR | Details and Download | +| :-------: | :--------: | :---: | :---: | :---------------------------------------------------: | +| RTMPose-s | 256x192 | 0.698 | 0.732 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | +| RTMPose-m | 256x192 | 0.728 | 0.759 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | +| RTMPose-l | 256x192 | 0.753 | 0.783 | [rtmpose_humanart.md](./humanart/rtmpose_humanart.md) | diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py new file mode 100644 index 0000000000..384a712d95 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py @@ -0,0 +1,232 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + 
optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + # use cosine lr from 210 to 420 epoch + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(192, 256), + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1., + widen_factor=1., + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmpose/cspnext-l_udp-aic-coco_210e-256x192-273b7631_20230130.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1024, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 
'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders 
+train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + # bbox_file=f'{data_root}HumanArt/person_detection_results/' + # 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py new file mode 100644 index 0000000000..30178cbb6d --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py @@ -0,0 +1,232 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) 
+ +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + # use cosine lr from 210 to 420 epoch + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(192, 256), + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=768, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + 
test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', 
encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + # bbox_file=f'{data_root}HumanArt/person_detection_results/' + # 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py new file mode 100644 index 0000000000..b4263f25e7 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py @@ -0,0 +1,232 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg = 
dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + # use cosine lr from 210 to 420 epoch + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(192, 256), + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=512, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + 
beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + 
min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + # bbox_file=f'{data_root}HumanArt/person_detection_results/' + # 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md new file mode 100644 index 0000000000..bfd925b2c8 --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md @@ -0,0 +1,110 @@ + + +
+RTMPose (arXiv'2023) + +```bibtex +@misc{https://doi.org/10.48550/arxiv.2303.07399, + doi = {10.48550/ARXIV.2303.07399}, + url = {https://arxiv.org/abs/2303.07399}, + author = {Jiang, Tao and Lu, Peng and Zhang, Li and Ma, Ningsheng and Han, Rui and Lyu, Chengqi and Li, Yining and Chen, Kai}, + keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences}, + title = {RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose}, + publisher = {arXiv}, + year = {2023}, + copyright = {Creative Commons Attribution 4.0 International} +} + +``` + +
+ + + +
+RTMDet (arXiv'2022) + +```bibtex +@misc{lyu2022rtmdet, + title={RTMDet: An Empirical Study of Designing Real-Time Object Detectors}, + author={Chengqi Lyu and Wenwei Zhang and Haian Huang and Yue Zhou and Yudong Wang and Yanyi Liu and Shilong Zhang and Kai Chen}, + year={2022}, + eprint={2212.07784}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +
+ + + +
+COCO (ECCV'2014) + +```bibtex +@inproceedings{lin2014microsoft, + title={Microsoft coco: Common objects in context}, + author={Lin, Tsung-Yi and Maire, Michael and Belongie, Serge and Hays, James and Perona, Pietro and Ramanan, Deva and Doll{\'a}r, Piotr and Zitnick, C Lawrence}, + booktitle={European conference on computer vision}, + pages={740--755}, + year={2014}, + organization={Springer} +} +``` + +
+ +
+Human-Art (CVPR'2023) + +```bibtex +@inproceedings{ju2023humanart, + title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes}, + author={Ju, Xuan and Zeng, Ailing and Jianan, Wang and Qiang, Xu and Lei, Zhang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), + year={2023}} +``` + +
+ +Results on Human-Art validation dataset with detector having human AP of 56.2 on Human-Art validation dataset + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.199 | 0.328 | 0.198 | 0.261 | 0.418 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | +| [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.311 | 0.462 | 0.323 | 0.381 | 0.540 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | +| [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.239 | 0.372 | 0.243 | 0.302 | 0.455 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | +| [rtmpose-m-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py) | 256x192 | 0.355 | 0.503 | 0.377 | 0.417 | 0.568 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.json) | +| [rtmpose-l-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 0.260 | 0.393 | 0.267 | 0.323 | 0.472 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.json) | +| [rtmpose-l-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py) | 256x192 | 0.378 | 0.521 | 0.399 | 0.442 | 0.584 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.json) | + +Results on Human-Art validation dataset with ground-truth bounding-box + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.480 | 0.739 | 0.498 | 0.521 | 0.763 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | +| [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.698 | 0.893 | 0.768 | 0.732 | 0.903 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | +| [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.532 | 0.765 | 0.563 | 0.571 | 0.789 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | +| [rtmpose-m-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py) | 256x192 | 0.728 | 0.895 | 0.791 | 0.759 | 0.906 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.json) | +| [rtmpose-l-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 0.564 | 0.789 | 0.602 | 0.599 | 0.808 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.json) | +| [rtmpose-l-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py) | 256x192 | 0.753 | 0.905 | 0.812 | 0.783 | 0.915 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.json) | + +Results on COCO val2017 
with detector having human AP of 56.4 on COCO val2017 dataset + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.716 | 0.892 | 0.789 | 0.768 | 0.929 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | +| [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.706 | 0.888 | 0.780 | 0.759 | 0.928 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | +| [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.746 | 0.899 | 0.817 | 0.795 | 0.935 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | +| [rtmpose-m-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py) | 256x192 | 0.725 | 0.892 | 0.795 | 0.775 | 0.929 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.json) | +| [rtmpose-l-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 0.758 | 0.906 | 0.826 | 0.806 | 0.942 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco_pt-aic-coco_420e-256x192-1352a4d2_20230127.json) | +| [rtmpose-l-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py) | 256x192 | 0.748 | 0.901 | 0.816 | 0.796 | 0.938 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.json) | + +Results on COCO val2017 with ground-truth bounding box + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.725 | 0.916 | 0.798 | 0.753 | 0.925 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | +| [rtmpose-m-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py) | 256x192 | 0.744 | 0.916 | 0.818 | 0.770 | 0.930 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.json) | +| [rtmpose-l-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py) | 256x192 | 0.770 | 0.927 | 0.840 | 0.794 | 0.939 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.json) | diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml new file mode 100644 index 0000000000..f0f21b2d6f --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml @@ -0,0 +1,106 @@ +Collections: +- Name: RTMPose + Paper: + Title: "RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose" + URL: https://arxiv.org/abs/2303.07399 + README: https://github.com/open-mmlab/mmpose/blob/main/projects/rtmpose/README.md +Models: +- Config: configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py + In Collection: RTMPose + Metadata: + Architecture: &id001 + - RTMPose + Training Data: &id002 + - COCO + - Human-Art + Name: rtmpose-l_8xb256-420e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.748 + AP@0.5: 0.901 + AP@0.75: 0.816 + AR: 0.796 + AR@0.5: 0.938 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.378 + AP@0.5: 0.521 + AP@0.75: 0.399 + AR: 0.442 + AR@0.5: 0.584 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.753 + AP@0.5: 0.905 + AP@0.75: 0.812 + AR: 0.783 + AR@0.5: 0.915 + Task: Body 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth +- Config: configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-m_8xb256-420e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.725 + AP@0.5: 0.892 + AP@0.75: 0.795 + AR: 0.775 + AR@0.5: 0.929 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.355 + AP@0.5: 0.503 + AP@0.75: 0.377 + AR: 0.417 + AR@0.5: 0.568 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.728 + AP@0.5: 0.895 + AP@0.75: 0.791 + AR: 0.759 + AR@0.5: 0.906 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth +- Config: configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-s_8xb256-420e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.706 + AP@0.5: 0.888 + AP@0.75: 0.780 + AR: 0.759 + AR@0.5: 0.928 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.311 + AP@0.5: 0.462 + AP@0.75: 0.323 + AR: 0.381 + AR@0.5: 0.540 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.698 + AP@0.5: 0.893 + AP@0.75: 0.768 + AR: 0.732 + AR@0.5: 0.903 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth diff --git a/configs/body_2d_keypoint/topdown_heatmap/README.md b/configs/body_2d_keypoint/topdown_heatmap/README.md index 9e23b874bc..47aae219e4 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/README.md +++ b/configs/body_2d_keypoint/topdown_heatmap/README.md @@ -115,3 +115,19 @@ Results on PoseTrack2018 val with ground-truth bounding boxes. 
| HRNet-w48 | 256x192 | 84.6 | [hrnet_posetrack18.md](./posetrack18/hrnet_posetrack18.md) | | HRNet-w32 | 256x192 | 83.4 | [hrnet_posetrack18.md](./posetrack18/hrnet_posetrack18.md) | | ResNet-50 | 256x192 | 81.2 | [resnet_posetrack18.md](./posetrack18/resnet_posetrack18.md) | + +### Human-Art Dataset + +Results on Human-Art validation dataset with detector having human AP of 56.2 on Human-Art validation dataset + +| Model | Input Size | AP | AR | Details and Download | +| :-------: | :--------: | :---: | :---: | :---------------------------------------------------: | +| ViTPose-s | 256x192 | 0.381 | 0.448 | [vitpose_humanart.md](./humanart/vitpose_humanart.md) | +| ViTPose-b | 256x192 | 0.410 | 0.475 | [vitpose_humanart.md](./humanart/vitpose_humanart.md) | + +Results on Human-Art validation dataset with ground-truth bounding-box + +| Model | Input Size | AP | AR | Details and Download | +| :-------: | :--------: | :---: | :---: | :---------------------------------------------------: | +| ViTPose-s | 256x192 | 0.738 | 0.768 | [vitpose_humanart.md](./humanart/vitpose_humanart.md) | +| ViTPose-b | 256x192 | 0.759 | 0.790 | [vitpose_humanart.md](./humanart/vitpose_humanart.md) | diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py new file mode 100644 index 0000000000..6f08f404fb --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py @@ -0,0 +1,150 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=210, val_interval=10) + +# optimizer +custom_imports = dict( + imports=['mmpose.engine.optim_wrappers.layer_decay_optim_wrapper'], + allow_failed_imports=False) + +optim_wrapper = dict( + optimizer=dict( + type='AdamW', lr=5e-4, betas=(0.9, 0.999), weight_decay=0.1), + paramwise_cfg=dict( + num_layers=12, + 
layer_decay_rate=0.75, + custom_keys={ + 'bias': dict(decay_mult=0.0), + 'pos_embed': dict(decay_mult=0.0), + 'relative_position_bias_table': dict(decay_mult=0.0), + 'norm': dict(decay_mult=0.0), + }, + ), + constructor='LayerDecayOptimWrapperConstructor', + clip_grad=dict(max_norm=1., norm_type=2), +) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +# codec settings +codec = dict( + type='UDPHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='mmcls.VisionTransformer', + arch='base', + img_size=(256, 192), + patch_size=16, + qkv_bias=True, + drop_path_rate=0.3, + with_cls_token=False, + output_cls_token=False, + patch_cfg=dict(padding=2), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'v1/pretrained_models/mae_pretrain_vit_base.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=768, + out_channels=17, + deconv_out_channels=(256, 256), + deconv_kernel_sizes=(4, 4), + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=False, + )) + +# base dataset settings +data_root = 'data/' +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip',
direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py new file mode 100644 index 0000000000..6daf87cc90 --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py @@ -0,0 +1,155 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = 
dict(max_epochs=210, val_interval=10) + +# optimizer +custom_imports = dict( + imports=['mmpose.engine.optim_wrappers.layer_decay_optim_wrapper'], + allow_failed_imports=False) + +optim_wrapper = dict( + optimizer=dict( + type='AdamW', lr=5e-4, betas=(0.9, 0.999), weight_decay=0.1), + paramwise_cfg=dict( + num_layers=12, + layer_decay_rate=0.8, + custom_keys={ + 'bias': dict(decay_mult=0.0), + 'pos_embed': dict(decay_mult=0.0), + 'relative_position_bias_table': dict(decay_mult=0.0), + 'norm': dict(decay_mult=0.0), + }, + ), + constructor='LayerDecayOptimWrapperConstructor', + clip_grad=dict(max_norm=1., norm_type=2), +) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +# codec settings +codec = dict( + type='UDPHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='mmcls.VisionTransformer', + arch={ + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 384 * 4 + }, + img_size=(256, 192), + patch_size=16, + qkv_bias=True, + drop_path_rate=0.1, + with_cls_token=False, + output_cls_token=False, + patch_cfg=dict(padding=2), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'v1/pretrained_models/mae_pretrain_vit_small.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=384, + out_channels=17, + deconv_out_channels=(256, 256), +
deconv_kernel_sizes=(4, 4), + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=False, + )) + +# base dataset settings +data_root = 'data/' +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git 
a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md new file mode 100644 index 0000000000..1e559aa4da --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md @@ -0,0 +1,85 @@ +To utilize ViTPose, you'll need to have [MMClassification](https://github.com/open-mmlab/mmclassification). To install the required version, run the following command: + +```shell +mim install 'mmcls>=1.0.0rc5' +``` + + + +
+ +ViTPose (NeurIPS'2022) + +```bibtex +@inproceedings{ + xu2022vitpose, + title={Vi{TP}ose: Simple Vision Transformer Baselines for Human Pose Estimation}, + author={Yufei Xu and Jing Zhang and Qiming Zhang and Dacheng Tao}, + booktitle={Advances in Neural Information Processing Systems}, + year={2022}, +} +``` + +
+ + + +
+COCO-WholeBody (ECCV'2020) + +```bibtex +@inproceedings{jin2020whole, + title={Whole-Body Human Pose Estimation in the Wild}, + author={Jin, Sheng and Xu, Lumin and Xu, Jin and Wang, Can and Liu, Wentao and Qian, Chen and Ouyang, Wanli and Luo, Ping}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2020} +} +``` + +
+ +
+Human-Art (CVPR'2023) + +```bibtex +@inproceedings{ju2023humanart, + title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes}, + author={Ju, Xuan and Zeng, Ailing and Jianan, Wang and Qiang, Xu and Lei, Zhang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), + year={2023}} +``` + +
+ +Results on Human-Art validation dataset with detector having human AP of 56.2 on Human-Art validation dataset + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [ViTPose-S-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py) | 256x192 | 0.228 | 0.371 | 0.229 | 0.298 | 0.467 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.json) | +| [ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.381 | 0.532 | 0.405 | 0.448 | 0.602 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | +| [ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.270 | 0.423 | 0.272 | 0.340 | 0.510 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | +| 
[ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.410 | 0.549 | 0.434 | 0.475 | 0.615 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) | + +Results on Human-Art validation dataset with ground-truth bounding-box + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [ViTPose-S-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py) | 256x192 | 0.507 | 0.758 | 0.531 | 0.551 | 0.780 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.json) | +| [ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.738 | 0.905 | 0.802 | 0.768 | 0.911 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | +| 
[ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.555 | 0.782 | 0.590 | 0.599 | 0.809 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | +| [ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.759 | 0.905 | 0.823 | 0.790 | 0.917 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) | + +Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 dataset + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [ViTPose-S-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py) | 256x192 | 0.739 | 0.903 | 0.816 | 0.792 | 0.942 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192-62d7a712_20230314.json) | +| 
[ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.737 | 0.902 | 0.811 | 0.792 | 0.942 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | +| [ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.757 | 0.905 | 0.829 | 0.810 | 0.946 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | +| [ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.758 | 0.906 | 0.829 | 0.812 | 0.946 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) | diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml new file mode 100644 index 0000000000..12a557fbf6 --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml @@ -0,0 +1,79 @@ +Collections: +- Name: ViTPose + Paper: + Title: 'ViTPose: Simple Vision Transformer Baselines for Human Pose Estimation' + URL: https://arxiv.org/abs/2204.12484 + README: 
https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/algorithms/vitpose.md + Metadata: + Training Resources: 8x A100 GPUs +Models: +- Config: configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py + In Collection: ViTPose + Metadata: + Architecture: &id001 + - ViTPose + - Classic Head + Model Size: Small + Training Data: &id002 + - COCO + - Human-Art + Name: td-hm_ViTPose-small_8xb64-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.737 + AP@0.5: 0.902 + AP@0.75: 0.811 + AR: 0.792 + AR@0.5: 0.942 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.381 + AP@0.5: 0.532 + AP@0.75: 0.405 + AR: 0.448 + AR@0.5: 0.602 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.738 + AP@0.5: 0.905 + AP@0.75: 0.802 + AR: 0.768 + AR@0.5: 0.911 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth +- Config: configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py + In Collection: ViTPose + Metadata: + Architecture: *id001 + Model Size: Base + Training Data: *id002 + Name: td-hm_ViTPose-base_8xb64-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.758 + AP@0.5: 0.906 + AP@0.75: 0.829 + AR: 0.812 + AR@0.5: 0.946 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.410 + AP@0.5: 0.549 + AP@0.75: 0.434 + AR: 0.475 + AR@0.5: 0.615 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.759 + AP@0.5: 0.905 + AP@0.75: 0.823 + AR: 0.790 + AR@0.5: 0.917 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth diff --git a/docs/en/dataset_zoo/2d_body_keypoint.md b/docs/en/dataset_zoo/2d_body_keypoint.md index c5bf70a3f8..4448ebe8f4 
100644 --- a/docs/en/dataset_zoo/2d_body_keypoint.md +++ b/docs/en/dataset_zoo/2d_body_keypoint.md @@ -13,6 +13,7 @@ MMPose supported datasets: - [CrowdPose](#crowdpose) \[ [Homepage](https://github.com/Jeff-sjtu/CrowdPose) \] - [OCHuman](#ochuman) \[ [Homepage](https://github.com/liruilong940607/OCHumanApi) \] - [MHP](#mhp) \[ [Homepage](https://lv-mhp.github.io/dataset) \] + - [Human-Art](#humanart) \[ [Homepage](https://idea-research.github.io/HumanArt/) \] - Videos - [PoseTrack18](#posetrack18) \[ [Homepage](https://posetrack.net/users/download.php) \] - [sub-JHMDB](#sub-jhmdb-dataset) \[ [Homepage](http://jhmdb.is.tue.mpg.de/dataset) \] @@ -386,6 +387,57 @@ mmpose │ │ │-- ...~~~~ ``` +## Human-Art dataset + + + +
+Human-Art (CVPR'2023) + +```bibtex +@inproceedings{ju2023humanart, + title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes}, + author={Ju, Xuan and Zeng, Ailing and Jianan, Wang and Qiang, Xu and Lei, Zhang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), + year={2023}} +``` + +
+ +
+ +
+ +For [Human-Art](https://idea-research.github.io/HumanArt/) data, please download the images and annotation files from [its website](https://idea-research.github.io/HumanArt/). You need to fill in the [data form](https://docs.google.com/forms/d/e/1FAIpQLScroT_jvw6B9U2Qca1_cl5Kmmu1ceKtlh6DJNmWLte8xNEhEw/viewform) to get access to the data. +Move them under $MMPOSE/data, and make them look like this: + +```text +mmpose +├── mmpose +├── docs +├── tests +├── tools +├── configs +|── data + │── HumanArt + │-- images + │ │-- 2D_virtual_human + │ │ |-- cartoon + │ │ | |-- 000000000000.jpg + │ │ | |-- ... + │ │ |-- digital_art + │ │ |-- ... + │ |-- 3D_virtual_human + │ |-- real_human + |-- annotations + │ │-- validation_humanart.json + │ │-- training_humanart_coco.json + |-- person_detection_results + │ │-- HumanArt_validation_detections_AP_H_56_person.json +``` + +You can choose whether to download other annotation files in Human-Art. If you want to use additional annotation files (e.g. validation set of cartoon), you need to edit the corresponding code in config file. + ## PoseTrack18 diff --git a/docs/zh_cn/dataset_zoo/2d_body_keypoint.md b/docs/zh_cn/dataset_zoo/2d_body_keypoint.md index c5bf70a3f8..4448ebe8f4 100644 --- a/docs/zh_cn/dataset_zoo/2d_body_keypoint.md +++ b/docs/zh_cn/dataset_zoo/2d_body_keypoint.md @@ -13,6 +13,7 @@ MMPose supported datasets: - [CrowdPose](#crowdpose) \[ [Homepage](https://github.com/Jeff-sjtu/CrowdPose) \] - [OCHuman](#ochuman) \[ [Homepage](https://github.com/liruilong940607/OCHumanApi) \] - [MHP](#mhp) \[ [Homepage](https://lv-mhp.github.io/dataset) \] + - [Human-Art](#humanart) \[ [Homepage](https://idea-research.github.io/HumanArt/) \] - Videos - [PoseTrack18](#posetrack18) \[ [Homepage](https://posetrack.net/users/download.php) \] - [sub-JHMDB](#sub-jhmdb-dataset) \[ [Homepage](http://jhmdb.is.tue.mpg.de/dataset) \] @@ -386,6 +387,57 @@ mmpose │ │ │-- ...~~~~ ``` +## Human-Art dataset + + + +
+Human-Art (CVPR'2023) + +```bibtex +@inproceedings{ju2023humanart, + title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes}, + author={Ju, Xuan and Zeng, Ailing and Jianan, Wang and Qiang, Xu and Lei, Zhang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), + year={2023}} +``` + +
+ +
+ +
+ +For [Human-Art](https://idea-research.github.io/HumanArt/) data, please download the images and annotation files from [its website](https://idea-research.github.io/HumanArt/). You need to fill in the [data form](https://docs.google.com/forms/d/e/1FAIpQLScroT_jvw6B9U2Qca1_cl5Kmmu1ceKtlh6DJNmWLte8xNEhEw/viewform) to get access to the data. +Move them under $MMPOSE/data, and make them look like this: + +```text +mmpose +├── mmpose +├── docs +├── tests +├── tools +├── configs +|── data + │── HumanArt + │-- images + │ │-- 2D_virtual_human + │ │ |-- cartoon + │ │ | |-- 000000000000.jpg + │ │ | |-- ... + │ │ |-- digital_art + │ │ |-- ... + │ |-- 3D_virtual_human + │ |-- real_human + |-- annotations + │ │-- validation_humanart.json + │ │-- training_humanart_coco.json + |-- person_detection_results + │ │-- HumanArt_validation_detections_AP_H_56_person.json +``` + +You can choose whether to download other annotation files in Human-Art. If you want to use additional annotation files (e.g. validation set of cartoon), you need to edit the corresponding code in config file. 
+ ## PoseTrack18 diff --git a/mmpose/datasets/datasets/body/__init__.py b/mmpose/datasets/datasets/body/__init__.py index a4aeef8519..1405b0d675 100644 --- a/mmpose/datasets/datasets/body/__init__.py +++ b/mmpose/datasets/datasets/body/__init__.py @@ -2,6 +2,7 @@ from .aic_dataset import AicDataset from .coco_dataset import CocoDataset from .crowdpose_dataset import CrowdPoseDataset +from .humanart_dataset import HumanArtDataset from .jhmdb_dataset import JhmdbDataset from .mhp_dataset import MhpDataset from .mpii_dataset import MpiiDataset @@ -13,5 +14,5 @@ __all__ = [ 'CocoDataset', 'MpiiDataset', 'MpiiTrbDataset', 'AicDataset', 'CrowdPoseDataset', 'OCHumanDataset', 'MhpDataset', 'PoseTrack18Dataset', - 'JhmdbDataset', 'PoseTrack18VideoDataset' + 'JhmdbDataset', 'PoseTrack18VideoDataset', 'HumanArtDataset' ] diff --git a/mmpose/datasets/datasets/body/humanart_dataset.py b/mmpose/datasets/datasets/body/humanart_dataset.py new file mode 100644 index 0000000000..719f35fc9e --- /dev/null +++ b/mmpose/datasets/datasets/body/humanart_dataset.py @@ -0,0 +1,73 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmpose.registry import DATASETS +from ..base import BaseCocoStyleDataset + + +@DATASETS.register_module() +class HumanArtDataset(BaseCocoStyleDataset): + """Human-Art dataset for pose estimation. + + "Human-Art: A Versatile Human-Centric Dataset + Bridging Natural and Artificial Scenes", CVPR'2023. + More details can be found in the `paper + `__ . + + Human-Art keypoints:: + + 0: 'nose', + 1: 'left_eye', + 2: 'right_eye', + 3: 'left_ear', + 4: 'right_ear', + 5: 'left_shoulder', + 6: 'right_shoulder', + 7: 'left_elbow', + 8: 'right_elbow', + 9: 'left_wrist', + 10: 'right_wrist', + 11: 'left_hip', + 12: 'right_hip', + 13: 'left_knee', + 14: 'right_knee', + 15: 'left_ankle', + 16: 'right_ankle' + + Args: + ann_file (str): Annotation file path. Default: ''. + bbox_file (str, optional): Detection result file path. 
If + ``bbox_file`` is set, detected bboxes loaded from this file will + be used instead of ground-truth bboxes. This setting is only for + evaluation, i.e., ignored when ``test_mode`` is ``False``. + Default: ``None``. + data_mode (str): Specifies the mode of data samples: ``'topdown'`` or + ``'bottomup'``. In ``'topdown'`` mode, each data sample contains + one instance; while in ``'bottomup'`` mode, each data sample + contains all instances in a image. Default: ``'topdown'`` + metainfo (dict, optional): Meta information for dataset, such as class + information. Default: ``None``. + data_root (str, optional): The root directory for ``data_prefix`` and + ``ann_file``. Default: ``None``. + data_prefix (dict, optional): Prefix for training data. Default: + ``dict(img=None, ann=None)``. + filter_cfg (dict, optional): Config for filter data. Default: `None`. + indices (int or Sequence[int], optional): Support using first few + data in annotation file to facilitate training/testing on a smaller + dataset. Default: ``None`` which means using all ``data_infos``. + serialize_data (bool, optional): Whether to hold memory using + serialized objects, when enabled, data loader workers can use + shared RAM from master process instead of making a copy. + Default: ``True``. + pipeline (list, optional): Processing pipeline. Default: []. + test_mode (bool, optional): ``test_mode=True`` means in test phase. + Default: ``False``. + lazy_init (bool, optional): Whether to load annotation during + instantiation. In some cases, such as visualization, only the meta + information of the dataset is needed, which is not necessary to + load annotation file. ``Basedataset`` can skip load annotations to + save time by set ``lazy_init=False``. Default: ``False``. + max_refetch (int, optional): If ``Basedataset.prepare_data`` get a + None img. The maximum extra number of cycles to get a valid + image. Default: 1000. 
+ """ + + METAINFO: dict = dict(from_file='configs/_base_/datasets/humanart.py') diff --git a/tests/data/humanart/2D_virtual_human/digital_art/000000001648.jpg b/tests/data/humanart/2D_virtual_human/digital_art/000000001648.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8f2202760b0eb691319123f7f81b204af3235bf3 GIT binary patch literal 1024164 zcmbrlc~p{H{60!q*{G+?%u+dJWu;}NmYT{bPg$8#S!zzCW@b2KI4jVkmJ^kwWd@Zg znFE>gNI8+2k|WM2n23mi3?jqJeb4>g`@3u1zwWxf*X3Hcg}vU-yZ8Qlp3n0aH{`3+qHM>-Dj}>z(F(fW5+El zt?d3iea8Omx%19fuDZCoU32%l;eRvWR^V+^ctm7WbWH5SM+u2ZkDnwz&v@}NGwaps z><_sg^YV$G3O;`=E3c@ms;;SRYHn$5qx@+9+0)zCKR_EC8lGe@r~XW{X4td*MS)Nx zUXm)%r=>@;z+qA;uqij^$_qu>PS>j=m7m!Ns@DnfGhpt@_rClr zm}p6_PG)tQH%w_gG9KOAByB5y>cw8VxO3CRA%gc=Rc#ye@-+E9RU6YyFI7Ttu$x}? zlP`NDvy|4%V>emNQ9It|dR5@x<6ayXZ!R%?YIA0-ro@sL+T?sL->%5vuSc_bNj6Tg z;N9XXk?alVot;X^HK5Rdly9ko*hF_?&7+B#e>^X>ywd5=Nc%OzIBJcM8GRY7dotTY*Nyq;xxk@og*bJmenoZ9!5f&}hXMNvFx)#ZI>1rh;(bnN>&vaetvKKaqNV2)3u7r4KHpB#*5tqn|^ENTy zx>5f;^Ge4}9ANW9C42F}0->Az|$%@^UkHVrUr_!#l z4J-CW){A{ z%qwxb3jU*OpxCm!#ssZq0V5(V0Q&dr^N|Sy`?7V{53Dp*D=R@tiSJof7E~Q2gg_bZ z@HE*%ghz;qOdW#_jhth}X-bGwm9^ULmqE53E_Vu*5S8^xh>SCvui-v^Tl_R|+vR0b z&)FC}(|U+4Y0F)XupQL7Ns@$~Nom3FPjRej{~ctzWMgE8qSq`)9@6r{mbNg?!Y|rS zVUns;=(;uy@bfiF$Rqu8z1Qz_BfSrVmXGEz=VGzCFzTZD9P=~d$lz)F^(4e!e8ctS zkuRQ02MnpRgPyjT?kDUSEl6ICqm|{$3y0MjtF6^#E{2*)$i);TB(l`4$h99kYY6;t znx0)~$#TuY@9Ykl?i0X<$qX;xiP!;LuGI7#CB!&ZwjhS_;b}$wZ@x>rYCew-owfZP z?&Z#Lu8)JsR$+hZk3ORt&;`w>$Zt;a*Q6C0%wv0S!(kk}1>lJ_!HpmuWN*7~ko2DK zm{4?Nbj-t-!6e^xva#SS$0;Ee(fv6-i+HW*^+PweHEbNimNEPi69xj;86KZEYxwi4 zX3v`*?if*VX);qSU*Dohrh_?{prbBts&`yS2vkDerVBiI&`0JY&7U|{YqGGBYPQ{B zD>#D_u~Sk3=NX=LQKB(nob0Vx+Y=;|Z8{8Wx43`kAKkSS=Ny?|ainzH6Cx7Ot9tru zLj}e3JH^*tUq?q|9bw7Zg;(X{?nB#b|K?RD-8md@KWP}J+JDJF=Q*m^U{!iqEz$9IqGKoLv7PN1NMb0r@Uz2t1lLMO>o_?Rd2A?+ns)%}A=&wiB zIKJFqwQ@_dT6xIb(>2xz<}#B<9Uz|w z|0}-R%#vzjO86Ebv?ZT1lnI*aoAP76X*29jc>Xg2orc$RHlfGq(XxQUzp9tmWGhAjiy z6gcG}Vq?6k#0U1E;y{J&;dS}EicXq0Du0U`7-0^cXpjSeu%4opx(JxMT?+BucA*l& 
z0VnpZLQWpiZ{4vbmw0 zLfe8;9G}8aLN+1Cbu#_QWtrRG)sib^|4@exD@=MbW>CL%!p#*co`!~UJeSFSlL@uyb($8C9QkuLj$OEhEe=8w@Qg&WKOfyIm#oJ>!zKg)f!b5z0W5)Ff-Wcg;x7xX_2p^mMSTFsAD>LW zKo|WTxg#C{W*^subR{Gy)ETgLeSd44t=Eq&N{HPok~(+z=9gR3DNA%u9n=r0ERyXN ziNl)rckNvR1sZK&n`wmD<@ld(1Dr_uiq5LovYw_k+nyKGky^DMw|x~VunGh#|KOjj z$N9(Pj(DsJ9`=I?OS?HfGaiKHkg25J<*R1h`4zTN@=`E?h$6&7(wi$h^G`}hFTHC_ zizK%cA{v|JYyQjYivUN&;;?hDNqLbwFj+B1*aj(9N>uGq4GVWIMm)!-LUx;C2y|P zCUI5f8VM{SjFmi6|J;M59$33H0%Q0q^36Qn#82p2t4m(o#sK3As|?TW@sU1BZ9XU? z^Q)&S`b*}3GE#qbnXBW0GhV$OZ1m^rcdF;AG++7##$h>%LMgZ&u;UdTZvQ6!G<3L+ z3RMuqgmMiWS1p(DTROqY7Gv+xH!s9nesY`)j-1PfOfUG3iY};%0-9M%wYWp~z33Bh*BegLwjhP7n*Kg$T!e@)Mj5ccDH@5xN}>T>mMMx)&A(j_l`B`HTWlkN=QrIqrQQhMs(VqU$2_>XK?^(<`9MM2776}#h?v%`dMW5a- zrV>J;5WyYcbUo|ZD-S>%3V@jilQUp1--9~MvZG4vu`NpqnEP^(t@_r={5NoPgx8Kp z@1WUV2c4%`X61DSr+LOqUhtT3T&%$gPhLkK2C&_Of~fOlcHpbI&@N{aYmj z2F?FxU&HRe!V?5FdREZhE>n}2Ep>Isb02ru z=YfBvN4P{M`=tWrg6PlIuVOt#*n1WIvkP@}zR2eD^XZnW1Q3aq<_6|oFi5SJ#DHH|7u_JN?vYw|4y#D<0H~q9a(FtgP z!$`N6<(S>Y2aTa-obKF+Ak(Vc`MIT;uJ|+CF5&j5ou6;!Ufi;dO`)+zq&lbp0uMMK zH({)+(1xMZPTa*(gwTXo7t*BV$LHGIn~T9m*H-cmGuAIhq~CUsU? 
zGlf8Xq;8TXV}Lrjant#~KJv{>Xw8hv-eS$;QE2m6Boc4*C1QU^#t?>(M#VQCKq?_4 znHhFL7wB0;Xa`%gz>lmsH1l-1e%VIqXCsXbr+DJ>(76ncwdn;h8$InLaj4(egNtNs zIV^F0(7{K20bRP#OTuDeM5N!63&la9W8CQ_yjPY4^h`7xA`}eT$Tp z04`m~zC<@$>ao~#GLtT1YK}zfE&R-w!`Lr#2%#Y{0OE4|X^G*n21ZQrq1|+I>SYT` zDMi;RR$WLWRq(t-5J?M!BWbzJB-t69!XXuaIRz=oYklDX{v&PP0v8&9Nd zK?k&CCse$!fcb+^<5j}YLVjRJ7i=>wq+e3N*J_dC(ygx*N-~@`mLWV9`NMP|e$JBe z+pqUGjTnE64^7MCN6bhbMw!)$D1)}`HWd{4*?A%GPoHpJReny@@4j$~_F%MQ`ke90 z^tit;jUV+;017ut-B`UPYuzN?AEBv(@z3`8vefEREb=_ILppgUz5VBWC&Mf6$?S)k zXOgPNI{pdNT+znW#y$_^t#*@6r)DZc}dy|y=MPGI)`y44?U`~E3oII1iL zZ{mBjp0xgXU2O{kZvgg>a9)9pG4|Pnw|2V!tINfyd0a(fU_#R*c9Eu)+=?A7(b@nO ze0w^q9NsjDF_Q1+S3L-Mmn8F$8OvOos=yiS);v8D8>iN=%t8E0N(Z10KGxk10aLAE zKw5-2Ju?3$UnMHcMbY1iGj$aq&iQ^9e$X`qi1eb6O)?|8c_+YL+H>= zx$cZF-O^DPGS@B0J(hv%x1c}dg%yZ?T&^Tv6*YlCWg2UugxolC&g`-h(m|FUJ7dKf zp$pm^W3kO7P2~7y{+ee+Gdo_2(-@mJTI9drQSH!aV0%}Mn7V|1HGWw(eP`pj-4WT*kLRCq2Ah-ww@GMce7rn^J8y|!?}`|I`~S2d-1Kyiq4I$a$T&7EL-su z7H9|?Moy?E*zTcBwK7h`AtYxSce5)euq}kP9rp4B>OWu^@v{P9OP>I7+QV^cW?rsB zFuK38rdlqL@=Ulh$mKgq$b$<&ADQ>L(Nqa(YUJw?#n4r9#4)|?eL#PN60$|1--Z<& zl;p%d#WMa;uW7e-6`fFInHN(swn76EnF47nRwOY8j-d6eP4gzJbeTL0hwFELiF{F> z_stZehBU2*&A#MW4Xr29_DaYR)EbciyT%Jo^)iYvvw9rgS>Y zVZy4FkaMyFLV2|C$CCMC0QUe7=mBp{P?DuCjj4prWRAh$0tM{kJghrh2@!sZUs#S@ zxCdLm+}yf*=g0a?H!Lbx39S3qj)%+7ak+@$hiVp8nLY7QFvuoLC zT+Pog?U(22a}ijS0*mN@Z6n!2PaCOrCN2Hei(R4!~nn91mQf&&Qp$XNoW-oPm8V+2{bKTfj_~?0H`5aLC1b&rVw-Xu14kt{H zUTx|AQ~2`wEFc4XOI?)^aM`~gfZK*wo`L-hjkOHq-j{S6gi@qej-K^}o9WraD6oWB znJ8+$gv5s(Av#^XefQMB0s5HVk_on?9m zsJ>%LS6$}8sOCi7B3MwOUVM&7Dg)V{f!cjL z#g?uuu~ST9Ix>5^QqfbB}P6}=>d^0SrNlBpMh==>Ta9qoEo+hH?{z; zDYG!5=A3)td;fRWn#J|Y13(UIaRP5INtQDoS2u6o(f_rmZ2inYac&$VL|XFXSkW6A z!gT&9ekal0tW@%oT=Oo>c301J+`ocH_6nUS6`WuOQ%ySfTmCxYg8kaWYhAVtR(=_B zX@HLF?^u_*N$S>i#T$c+wFDpz2UO{ina+= ztlr{#zXlA{y&Hz5&0lGG<7{)*FssM+=#Xz-Lh(I?Vh2>X-WQ&CTYBt`_(#d)i|d4P z4?@UW+t(8r(gBw&HDb{l>QO%sR^zZdN=U;|c`W8r0LZGWq?JOG@ z9&5E6dEkQQk6jN{0dLp}%7qB7#I-Qm^~$Sd7u33ia2~8XCe-hk1i`$Uv6;)H0MW5Z?z0W+*E)64q7Y 
z6>wE+5qdD_bSOC1%x5p8v*bvDu^(JBUD3Jk)1}PX6ZRka(mxP_23T+9v7V7 z{?mILCvQdHLi~laN-y}p*@mrgD>)Rac#|5nBnKM%1xZg9^85C5i?);hG-hJ@!CI}c zq0`QOLb#A#A0Q;X`?lA_$YN>{OPwTOtiq;1z`Si6DlmWZ#$oipd(?sm+!kPaIis`d zpjkk2^O2^BasAoQ3sT5{8fs_73cs4VZdyee_Cq>e}RNEFkZ%leG=or-W=8Rs~V6|Z+CmT98~?$+%{ z@)p3wk%=-Fq%Td)+s#EqhNU4cLUuFVy4HNFw?`0^Cf2%y6GQST#ZV(2Ap=)wxbc%* zB&8{S)`i>ZQ*va@&<)in6`(#kPElk3ME}W8zC}>kx`$E{?MU^7V+I=WPKI0YnhfYb zn471Qw$b@A_uFm?u$c{pu8qG#F5|V1KP-@-TC28=`+01)rG4L<3< z3*s|r?*cKa3Bs($kP_sbuz-(YDQJmaK3Q1$tp!yF)&eGS?dXt(;j_PF5&+^<_Re76h7 z`(4ILfVh&h2v~cU$uEh=k*?DIYJ+{97N}h<=xa)d|IHYpxX|s@SY_!!6PNyxjXiXX23=BU1bo0G;!UN^cP1FB5Y;)rJV%(k8n48$| zmmAkdyE0wps966JMW!yi*$Wv>Z=~|e z9eaAeHqjM@Te2H^%YEOtdyF}_V1GAQP&QwFF)7~xz1nIl%Le&+uo3uE&~tjKGc;!1 z@ioYJ^lKdKK2#Bc?=Q`3jfq%=2rN(f37yHZ#vZ{o>or@H7@z%XY`Ht8s;4;=c*YX; zQk-n;gQiea8)Qj+MK-Nj`jT5@s%@t(Ou=O4Oz{82W%c5ToD9U~H=h_LI!}qR94qP9A+w^9jHufSTYAGJZ<*)_i(B19XS&V$ z@k?J?QO0kN4>$G#>5h2YMkS<=-&0{$x40*J|NG=15lp~pXVLHq80GPE))Mbk{=^8h z0-M8cv#nQ;ovT0ad3ca>OzZC8hH|nJa%SLN6-TV@!|sB!5&9`j?cIgP)o(9*=6-b@ zA1O50I(&O65^-y?t$Rvb0UI}r^dRqLRW5uQ>9p&t{MGpHG8nLDn9$~&YNM4#Z*tC{ zcgIawgxJtDn6f9m!BhmZ6Or6g#8Hy>p91mw`S3^9>xv{P-}Ined0~vPW53Z49lrE8 z@>HxRA}D{W2hmdZc7FTSe&7{O=!X>J@8v|X+u5_Ih4XIirQffIy*S`7>n0NA>@(P$ z80h-~(NE~7HOJfTb=dU8UE!gnmDFNfWdDMHCno*cEA(yc5T8esfp0L$)g~~zU+^~E zkkU#jE;?+yAt6-r5%y}J!gcGHS9Rt<$IUl~CWmx$em9HbOv1w=`eYb8o|I9SX4?}@ z)EMjBNFS0^%T89yCGsbK`;FH~>I=-+xi$AjK+=W;r3&=3mtcvFD+hpGlB|Du0g2(4 ziX=%bQ3mkdLlc<8#x*Q!$eOhVHfb@@gy6?-jm*~{B_TFHi4HQm)6NL9$mUWq6Sa%| zAGg$8I~2mr9W;^D?^D^}dZcZj>Ed^g@GWvN)B;r=|E9@1)mO%iHIrL?!J4Dhza@k_ zv}a^+x|r*m-L{S#!J@WrXF^hQq(BUFxT^fbGBIeiwUuRsE!c%3-cz_3fBu0&5uaE2YJ1hbMpa9sv5AD5f1Q#v5tJc51|#$9-g! 
zXKIp*COr_>QsJG|&aKAk)^@qC;18m64H=&;bex<0wp~jOq--HIYG~=~GtPC;OlzR`^ltz2F_c$Rhszbbg_m;Jc zr>(tu&7*#Mpn`$wLnyCgsY4wy1!1+*kCKr=RYD3ggrr|zuI4>%A^9a>em?AHf9Sqd zK0qyWx6Lk_`(mEwx*0w=8GJghX9*xo#kLVZaPh!3*4p8d*KCq2WxctbIJ9Nzp2-Zc z_n|kGxogtV_#%v#?%6KBSG)DiU3AKYEX@jP6L4cweG*S3o#mR($8S~7)G zTTq@K&SQoI=Y36_&YB9SemOvni1hwvG*;_W@dd{qrg_VyqPJgO6`kgFxKx~r8edw- zJ*T&%&chq7aIf_7fOVD%kyXZ@{Y>HqJ&&Td+-4uj4Zh07Mhh(}+^7rBy-NLb?=cr` zB%Ibjn`Nq$K8}eOuonOwzGrd4;}^AQZCxghP19p1VnGGURm%m1nQ;KJLW6}+0{k|+ zATgz&eLXxN>Lxc>>sZS4d|dxbciQbcVy}TZX>eg9VDA(B_YV!kcDbbrx zZLyJ@-$1n`>1pSzbHa}2*%kOIc*p!mzu4VlvTV5gz~mpR7yX&Fv(GE58i&K@ViVKC z&DSIw8yQY)G}!q0jDBrN0L} zr7OIv{O@iV{mI(gVv=)L&HlaaTB11Ca<0~iSm zFU6z5PQ~YVd8$T!pyd=xXYmA6udNZp8V|A7x|4{lm|i7h8)m;Gy{8SaIU3m%iHf#r zno*R^Ws8doO!`GI8T%CBs;@;SwrF%$%bkvT>z~y$ts9Gtnv-(x9enKEP=3I@yoc!o zwF5q1s&g(>o_Bp7N-!x$FOig*b13Nx(NpEeeQ?$c2xo;TcM$N4l3T8 z0>m(h89`m{9Cw(6E{M+X=xZw(sFwNNmR}c9bZvIl%)-J$_dcx}cKg+>`DM|5Wp^D* zB}bjd_15PrA&y_`Br!;A+*ZRd{}R-`e@5`?+HG6!V%>pLe5NG>b9@40@IH-N>Xxdj z^@s{$Pj5}*;>n}+@B#gVrut)=Mu%#I(lUo4d=`QQ>}2n3tM`A>#2ZH^{v2?`2UMT= zQsb>POI>GlFst7$se*6uqWo@fL+}Up4ko!=UiSrj@ZnN0C6YJ`uDknon|uNFeP{=V zP3V2t2MSd1gL>eC+k4`$@LrvMg&J@oYfC%Td!0562WA$j>vE~H@rzq=fxW#S$YVoT z4`_kQ&sntVNLNNB8|y`NdElm>+5yGn-oxZRqkR1To~sjBAM9+Q9)kUg9QL5M7T+Sq3?#QSXhsNnkyyfRKI z<5RVV%MUuoV+e}SJFvsViU3!2C$k#HNNtY-%KWe#U%4-TOX@8_Ga__+7$ynj6!cSw zO|UqP8%hX%%wCUW{yXqlYw(G?cf+W<3NqrtbWLu&ekvAkqOh^Qu(S{|%ql0ASP1{q zYSE4{oALY7&id{i5g&rNREGXpCYUGZJ}z+j{MHKwhZ##yqh`^om(%pMN7(HP>~xTr z{lVVLYvB>IcUiLZ$h|xY(dVJ3YNEMmpppLk5{smZm5#0#lerwrAco*B7Syy!7Qh+u zXSp%4ss2*tG-3D}u`leEj5o++sz)5JmUh$ zOiI;lJ~b7;s+PahM0#wyHGS-GiHM}iaT&g$1r^A{>5?o!-+6oc!eAJ}?wL#awHzOl z3?lccj<@FVICo>sK+u8{>QzxAiq~(s+%CZ-3IeY*Uyi|GUoxQs8rV60>m;V^Mitg2 z(*Q$5+AaI>C@CRw114;hB>jW~?nJZgK3;z17+ik8!VY&q1$36^xQh5bnC#3fpU?k> zZ6gK*d=hZ5X(y|t3UyU;dHC(;ordjyyna1WLUL2$;)2kr;ntBXk~dzYfMMF`TZ?Ut z0BWNOjT|50s5S!p#SH>MzY-!&%sz&XV`a=*d}EMr@wL{>{)zEEnr1xV+gO-9u7q4| 
zHOwoi^$iZm7X1d^OI>b|J%2>oC-cHz#=7Zv8y;x&C4t;&P03D~vyszd0Gl>mlB3RecUvGL$e}R7{MFj$8V2( z^UYybr7lLRq>~A3@F9_4l3cz)GfXvpb+Kfj?KmNzH@whUjJ$LoT}ZzVGV5GpCj;$v z>V--8^-U3eoO;@kLU;U@h0Vzh>mD`D%RCkxZNxP*O~&|_>&?buMR}s2V>_%EVMims zHs{U0E&f^;Xo%O4d0=|Sbrsq5D`GyiNBJF3k3fc&McbXJrW|MK%iylxw|Bd$>`B!d zf<-Oc@Nvlz3wn|cFFyS7!oGfU>{d=~Vs61$xa{Wpq8kE}^_j@;Yi*;T9FsH?M?F#d(WA=u)o?-rto92P<6x6$5ybB_~-@bVa%u66o2p$~hX zLH;C%P42LUW>`la^`@IrCL7DrTB?U8gK9E0ye>C&*ng0=yd2LVN;gl?6Z-k8&^B}PpGLxmzV1b5;|$nNpix? zOl0TD#@+}_H1OjKC=fOhKP{T#A==dUOml^f+(d$&3{CLf&||%8GR0&Y!6(GF8zyfU zo85zL8FB7Ar}L0PUWDd^Rc#+=z8bMNcD@d|E^(-O^xt!ZJ@G} zsprPJXGuY2WZ*w$mzA_+xjaA8wjiKbvgxTP_pF2~t52=7F{N`xz=GlC`t?b3%H+R^~j2l*G1e@x_a|=fg53D8?dbt#rb7R7ABKgYHT$cO9MUy z_He8;7o?{@Uspmt+}6d>@G+DyJZg#5)CoH&$j z2qCBS+9vO zfMx*n>i@L=gT^w$R%2;G>jGdu0GiXxs(pElxQ!RF9R-{bA_p8TyXC#}@#v=WppJAb zQoe#Je*}{&Y}9-WVxEd;&YCgj9H9sf1p3FRmDUi9t>J-TEF(`=`}k zZX$21k45#q2ix|2kwk!}@*6)+WK2w7E(J_Wbe^!}wuE$~4f^P`>Cxksi?2t>JJ1Yds`~ z1AE{ceHX!Hu4|>p(JNzPnY~p+8%YsXWAL_BG5Oil@<8M=Gyw1;y`MMy@zYeU=kcni z;6=Y!=+#)3+%nTIynQUvV{ukYFPQ8l!bjq?`vEe^PJq0^+m?qZI~}|>Wq@fw=pZzk z3$Tx^^<#WKJ8taN&Z`m%6{t}mXJjSWM}$oRVX&X$pBPsSbXHZE6mdv@I>!n9#H^;qYy6NSrK-*NVSoh!PiIle)MFlq$H2c-)GIXX^VyH(s8Ks%1%+zJpuBumiLI_GaQ^a#=A> zy6-+x@g{f;3~y71dCTdqpl;@jZ|R;he^Xd|IFwuHp@e*8i+ND)1{fFQRPMs%9Xa7$ zmfv;OFW(f7BV8Mv(H|HK7i*c%+TP2_Pdl;pcg8nt_Sni775d=Ur$aT8nvG1e4Bw9L z)ZCYy{={e0Ik4_{pc(a7G}9_qr!mt5+=OTf+LZ&QEkKmug&hh*xHmBxy&Ora1A|G( zBXAU9Zp@a$bx)&Vsv&6PM+pg2IxDqqLs~0dU@70fEir|o9 z&?!z&xFT!9twZyz}6)pS>}@V z@pkuxn*wgoVH7(RWOsV4cgS{rLsSsbigtI$AIf9DN@mJzM@W_;6%xV;eP#uzi=3zx z<}iu-x}iDdP+E7}-7_{TY{+VFkv+nVS)K_t z^1k-QUlP=Lz0|Ghkz1pG$1jprcizp4$^wJW0c^Qe>$R6UYg}`#^SzrYPi#n#()zK( z6<{te{4e96;}7t&E=wXNHo+16nk3l<``%E|n_LhG?@}2RvGK7gw5O(s)fg(d;!on6 zBDp^wU!Vq-<8`{ClKUueLT$fkOhnP`9gSa1h$;_S0!HqqSMaD5<6-AED?)SFM8m0B zIz+&Y+x_Lwj#XmdGqkFX>O{u zd%6z;x`sDz=fLUzVkmQlwffdF#x?1AmHl<&l0q1)5I7QlvA^+Pbozo|pgTG$ND>68WWG zt9pgYc^QrNAq`~H8Td)BgvtWaO!xFa5SlO;Crg*7;>TY`UHA=dtHgOjn3Z1AoLp=G 
zdMMR-B>S%tRG|cSI#X=A@wm;QVkxwPOlpW%^UIzH>~zp}T_}`{`jRGYTmO7$y}zH?C2zYt4o$~%6#L6oP-8`&RV6pXvzZ<1!a@7MSL<^D zf4onI9}X!lAx}XgWADZZ*@TuGuEVb6)%C7EI#w?(x0*GCysH9HC!z!tZY~TEp8#vd zT;ez?wdG0dA8$I=2A_5@8DK=KyZ*W0B?|C-P%q)9!@$;FSkHnQz zh-Priort1?=Ca6q0h%TNVQ}A3N6=!)T*f42NG_-D)kcN>>6;P1SdOtK=U=_+8-#Z{ zDR+wDh)CxoF-7QySAniWiRjGEW46N|Z`#%%_%`AF~$R)NpXENQj@pebL_9 zAE>X_%Kl+3>M-xi`KjT=lxF8vpWBn4jhUw@ogS0OP8jVES$%r22xsx_=Abe2dYFR- zM+rFzyH=%GH5sLZ+(Y|MF?$8(xwnFM<1zr9faR5OQL`~6DEoeFrCFTgDX6G-1e=FS zKNBmaPN|YEc%+9ObRyu$%M-LIO`gc-VBbOdaChVjXO7lq!D6If277k`wehf_dIF_8 z51=46B((Zq=Yz%c!lihU^X6CBpsxUIZlJpsxZuCHl@YvfFy66A9Fj zdgY1}i*dBydMo*6&K{=&kJ#Q}!Vm(9UiA-B0^^|h{zp&xh+x^9U22vQ zLMd__k>8}Gpq;O8o28R@HnCXZB#?l1w zD#LNZWnNp&=UdVr3Pnwy3000}fy16Ut}|`VfUIW*r>P9fzX|b}`0&T<8_v5DL0&#sM=f$30AyJ`3w-#&K(oE1-T5ypn@`YyQJ$^ zFb)2IGe6Eb?s4oVw){(YL!;?Iu-+l0>zhbRB!n0tE`%PEU|Y3f)oRr8@<5e=mu7m7d; zEdV>@4sRyKrL0jUgm~*X3)ts=hw2RC0UHaJ_BV?*OWPb=pQP#$w(3Ykz@rc3@>|B@ z@TyjO?B}Y01%ndJF^|c}R9vgXI4$oPv*1#jZ$qL7=lMccA4ACw-WqxAI zh7*yL#Ra+Ss`0*1SQnU#3wxo8cXaEDI=2c1jFbb-c<8i*R45RyLpL^*?^47_v78BnQ3Xpj$evvli`cun1ZWe982ank z09a1cyzKG@)%UY{) zb@e@Xcxtbf5TaUYs$Pku8oy&BsH}miU^!>!kN83OOE&Hg0VriJcP=-Qvt#{iizA-yX)~ zgna|vA`7ePc>TroM+M?vV4hXWrT3=4mPWP4KYZzFgggViMmkd$cK(2e19F_2J1e#c zvNt~~=z8XW;NK+rU;_d*bE>PQ0dhV?vo0<|QSotIvA@jbPs1msTm*bvG~CUQ$zow+ z&Fiv9(b zB;$YQB2-*8&Xj1Qy_YG{UgAQ)Ss(u-L3n~5Q2!jn5ZEi?nC&8|kky9{n82!`9}IT|`p7I?B$vxv)lhC~idq=sQ(J?cT6bLTVYM9Zxg7UhTZAa`QazRXt5JvH8Gh50|e$rjZ*;a%kMpF7R;J zwm2M;R#=f221%b8S&CCwN9o9~GRV})2D(!(Q#REfsHnd#ATn;N!D?w6#{ z1uTUOWw95oRW5?s2aGD9Bo+? 
zf}*fx4Nfc8g}xHAN)kG0+)Upf(Kwl66_vzYr@;bX6-L+=`jv%Lyppoue|eEJSM=ALL{(w6c&R>*t@Yh%f`rikPC z2H)tZ?&+n6Gs65P7qrK2YaJus=*3Wo!?!`2Atz2Oo2;^ZcJ237E7u0?P_WpV z6P-{k$xe-jO&;nkF}tJdleSNNp>xlj`17jhva8#L9=dZ2@k0oH@5pQy2&x~noOv6y zKDIIC3aOVr_7RALRg*PlT#ud!exGvmJFaZ=C~L&RnxDg;HS97EU2CWNu87Ps}3DW(WU(+|bd;zpVm1!0S!@6^Fa9 zc~Fw=aWZ(ny`G%kWn-nV&p+WRK|L(R^qgQIVcXR`nQ*RA_jk=>GxQBp_21qA<5Z7hS_zOP{{6bqa3y* zNe&}tVKFR;#judWHkz{yo7t(~_4)nhk3H<#b-l0G`}KOB-c9^rRrzCu=rGdqo^#Nn zv@55Syx&trZ_cbtQ&XA}?rkTBp@UIZSy3-kXPx2#(xX~#?razqgPjVMa7C;w9>Fy# zI1D}!cNdwXQv zG_=c_ZBm5W`5??j205Fl$L?%sZyjKKAp3hVa#PORGf0g@%}Vc$X?HZS&af9mthU$8)R~E4}tyhbC0OdDG&d6Oj>o+KkZ|7 zz$hET%Ucq6)~`93(+L&q*0p0kX*C!UJ?KHtRgWmA<%OmXx$W)^)BzKM8IgwYN`>4i8vC z%j9(b@ik&+6&a}mjMQ`uuHvX{Xqv~EmDJ(%3@%xVW!>4?X`PfDwEY+{tKs?1>^XeW zJ+H1cIQ;GhFM7nr;`nOtt8JJwk>#wZPaH4vF4!c@)_a!GT-G9QYbU;WM&5PW;|r*! zr{C&E!DJAtITc$rFG%X)vnZc;eMe)xdJRki2KW&0%Hc0R;t#Zbjkj!*X;z8n>{tUM z>&{6ch{j9sQp*>3FM^i0>C0y|CYPF>XKPUWhHla|esLRUU1q-FLHf5tlhTT3ferLH zjxF{BMDa^DR%{&irRdA#Bk!LQ#cYzZIK9KwkrkcQ(O~cu9|Idrf|9>+c7*M&9V#VH zx*Nx6TfyplggE&g7Y~zvy69Ovg+2e|icZy4Coi*srn1QLgz_e!bS7ii;6_Y*%-q%w z0*6Q9y(o0IRl?b z)OuXzA*pv`lkL@5(&F$M+q{JNuI(cIs3NVG@DAL>G|n)F6t7deCS}~8JqMT!Ws+^~ z7+P&-K~|gYa%?#^H0j~jD;DuK-#wO`esos)If(O91i&ZQGDVnDwE)kmp9I2&7D3rO z>8b^L+8Q>1pKX;o-*>flba2J>auyLTDrvc0T{1(UtxXg{pR;fOR&p8UTjs$RLZs^3 zw_bx-2srHHwth_(KP?jJDZgGhpA2Ae6xQD8-yJVv$yBgPWKgK;;ms7zavEV$X2v3Q zIGG=3gTCGli4zj62|awwBO!!tOULc^ zTei5@@+*FlKo2fOhV=503psHZl_m5EmN;>f)34p!#1pXJXbnA4H}bTjUF`N(JX&&STrp5t=E`H9$Utu(%7Asa- z3QfvFQ{MQ^d9Jvd{@=>P$}CfK{wou^Ph&xBt-WL&MDNEi2pOPZ!!SrH+em<0*M|QN zen&UhU_JIPr&=(-Nckrrii=m~4XYb0du}V8CClffc+n6_zBVe~s!cTIPj)r0YjKK= z+Y%ewJICp@9m^p~sKGPCF-m;@o|f?#f?LZKs&powDxeB`MBySTdDBXthVF=iA8~rK zH;b~!;F}(2FOvf}OlABMz`f0@s6<`o*_PVYT+te{`duoO&18yC@$G>3hPdcKY+D~- zR`^LgIKFPIeV}x!!Xkw{RS#|wvgTwED=9&@1Kn0u;URR$BMMLc>UiL&5gB?XDQ%-5 z-sDmm?vYVci~;%h%^=)mKL0X?i}lLpy6kRHSLz*QMyDpwn?5jS+>;xX89hS`1EU+A z50Lok4*iLGvq+8S(N$*xt63O74P30q`N6!8`qBlbvNW{?tgKW-a!5`J;SFRGPKQ6V 
z(iCW;jHw-w^b~F4J=|nLQB+ga+epoUF|BJKu;+YQSb6>SE&T($}NtO88o0)XRiLdDA{Shl&6Iw_-$XOW& zCv)o2x`D}x2A8Aj!3oq9NT2vAR`wdZAj2&7G&b)2WpgyD!Pos%0J)k9^Pg3OfLRo@TPScx<8U>|yd=>?f-@4^gceB<8 z#ha5j^*T^BA^c+;6wX^^n`zueK`RxttayfVYlMvFXdo`9&4*-%ij9ke{|}; z=5e*$``ye3b1s)f)Cb!(r|4(3ZHC`gVYMg?ZY2Z@A)7hb*=` z6Ts@)91l*YcbAaMUcY_n;<9LM{%%F;m`2-h_}&aCGF%fz@VnO`_BqmogIqiymO$ zcRqESch?RNZ-iG0b4@pE^}ua4cA3M4)jA=ts!b~XkN2But&jNl0Ok9-I~du?GU!{J z+A8~wXJ<}#qcqr61;_AG znZ{2}Q+hRYlFN$vCODTuohq41&05(^>*U*nzxmcYgt!?&8IJYU#^>Jh=%c%znUj9p z_kmXT%|Gi1{&8-Su5SDs4_Kzrg&l^h3i+qO=5J1CD6p~m(s_$Gx4x##p7~M!grTwf z|8#QIr&Y8MX10S|dcR2_zsEic1%hl3u8Bz&o5Z0cDiJ3$*E7{u@?>V?={sJy*Lc9# zdUWQ7`)?r;#Ucrbc}eP?ljx<*Z>!LKXw4a?RB|$8bbl+aT%LIFm!|0K#<1?kAZ5e0lMJ(A=R`AwJ!!#Rp(e}!gFq8Ka_^}( z(y>?L6l)|Nxk`XbuEvQ*bE^h%@ynA3m_0ZyDE)Tc%-TfFB?D!i4&#m`q<3LjAl72Z} zIi}!u_W|DaqlOLBPqE)Ae|(-I$F70tPr4H-|*Eb z9`fa{i)(WoIwSBWYs3TWj6<|mVmdE3t-i^?D8|eU+d5m_){7wpq!%Lq1s(h zQ6Y<$KeaRk%oxo|@Z6FGMsTDL3ChYry7sisrAF(bY-?)9Gt#SdsMjcv5_CM&=9_mK z>H5W!62BpEbfRbGu>O(o{6d?GfT|@6n-6b8z1^FlF?C^l^wY|Jjjckm8Rq6@{RVxp zvwA!-+n(fYlS?Bj2<_MjnMTQ~$p_+w6t7X6EIVyRdviROcoOvzC{q50gOXZbQIY5F zr4ZzMCX{_n$VZoST2Pmz-s<-Q;Qgw3z4BHMgS`)ZDes_t1|izWA&y zouA^D5)M8x%W9{3vua0YUOxE;rVJRF&P2)fvuE{T3?y4Y&tND9Y#}^Sd&q|$4%*aY z4;A)lS^1xZd-nwxc=}Wu_EhUZr89cy?x*gSwZ0!N)yid}7bLN09Fs7V^Fl$e4gv@O z&$v8Hvc2_i5HhRWdbNe?T zEaag3V2twXhVQqPjp(y(5Bmb^a9$gP#tHd>4~Eb;!^zpS%`z(3u-+c$8aE^co4eJu zfULWruPgFH(```Y`nWTHsNw@G;^dRlA5SWLv4WbN5SG;L|BKfgae_IUSnUN3SEe_0ogjE8j4C zX}#0%szA2&8ju6Gk2*ouIL5{i*IXbIHw<2+19^*{)h61jE(fy)V?#I~PpXhc42&Tk zDNpZhQApOg+|#K;{&B(}J{D4-lX?%9QESLqF&D=Vt=_vn?Q|d~vGgHhnY;aosfo2V z5WZU~40qcqGZV_j3ip`H)Eb1vX4c6$qx1O9i_4Lq%h|e(i8m)1iw`aGBAl*VVl7+f z@x4OhM^`>ig^r5!fDs$jgWhhH-m|2HsG8ko0_@9Td{%>cclwVrKMEOQYs5MUxN9$I zY+T+n^M!p$3Dyc*wm~P7v~mb8v4uZ4PJzn$wEWNpQ($}UJHTIU9u?Y$#)g`15E7$> zAkA7ptxZB3bCojPmgbj+^A$RE1gB@3DSx#u*LFhQzNs21b9`37*{u@aZtC^FrH;aB zZ7+*c*q#aJ83UQ-Ajkrfq7qg=30D(OXa4xqOA_7L0qKcF>enQy_FKEiTB{`&{uW}L zvYJyY(jM+66*|k$ZaRjdrzU?&bRUY_E8hf%zpQC;IQD7qyB@Jy@n74~kHG7KeoSI= 
zYRSZ;E_~2iA1Af0E3G{_kTKZ~4ztCVoym?o?-eWf76XdL>|i)ewm9UtM;FibLe~eS zt@Iuk1VAsa$%K=TV)7YUOx)S_X3@I%bYW6c#cW9~C3dkj6AO`gP-8K8XvxFn%H__@ zxbc#v1dt8H65u?|Uhj}c?n?1RwoH8}=z=PExlb8(MSd<7d}o&m$q!u}Ddl5vxSi*a z@2OFG^Nco*R&g#>Z6TH>Rvi7YeIg^C85&Fj6496Q4SJTVzHsP;1P{mwsXab)Mj_4_%kAlTj@mWt_= zJZNZOaW+-FX$0SJ>YZ*kRw z9@k;yy?klaOJGALM?+U;x_jce@OR$R<(cn8^>qs!fU=e~aEbhSj0doJ$jG=Mf9e0m zEH0csmtswFMlL^qcyq`(1Z5gqHxGNiJnz}mXU5gGHgnLuO9H1U&AkI)H8Q)ieUVM* z^G+9}2Gd;9y%U@31s=bF>Y;I|<3OX~gV@wzh=NFGYRW$xVbyD-dGDgRmfFHCaSE7d zYAi;GPW|}wGpk-q$fx>Tvl`>=g7 zp!nj`RqWqt!!3aUpYf%%5^Rqlkzt!X^GPrWV2^015SasUAYSLV_k;+Lx; z@wrBEA5@#;3hzlnWS4#uR1L9)mGsZCO+b)gzHjr2tI84gSJ=Abu|EF z>!2j;+~@ZH#{Q7|_jberyXAW_ZhIO2Yhi3)`5o;&yy?W}-BE6I(6$x*e81!OH&t;X z@to1i zmgCqTjT$%5KP3``>}Sc0o$9dp87r;&8+aePnVQuFg65%biZaq=Yeh8#M?D-XvTfEj z{?>2)@a5(H3k)22ID_Nt9;Qw()Y0In~^92Jku)+b3&|K*i)unI+ znJQa`mt3W2w2Bk)8fHlb9jlgJGZl-wemes45i?&a#b*5Vm0|Y1WIZh5u5}Qxz$4+h z&n-vh6&HV(z>6ehb(ab&zUY)jz76@nziPJ-#cX;9GzV!av?o z;knt(@wZ>^byf@XsGKb`Dy6(1p$~P&F+s4%13e&UWv(x;V~BHec<-g!j@Hsa2(T5C zEcj8c7r#%pE%VRL&c|R)CTryJ7bBW;jv1#k!RLT4XSN&Jp4s;ADO8S_AisB0)F3_v zO`B^B=}kG-KOub>auw|5A6EQzsAu$}*UDvM^xtNTe=nHr55n#fIEoLtvQIkUQCE5P zR{7#+!h23hD z>YeI8NK)d?-^?-a1xd@0m^mfW@3IUtUBo2cU4Z`qj0w@nlMC9{%9>8 zc(;g=j@zti7!yBP=tW`e?5x}QXn)SHo~=bKR18IU7jVuaf1f!ki{5~2!I&*AHRxZ@ z*cmXf`Fk6{0ow9dWy?;x!850$4w{2j=lY_gpvKAfvT+ z^$(dj*C17iatwc17w{i8|rnAS@pqR`b+QQ}TKi?Zs5Q)421MqY`l z{tW;oYwIlo8|)KOC_OYdoLf;^rST+`>ry{Ql#x%G;s}8OP>SU`KV-*69&W0p7N&_` z8~K?!QGkU%5^)qYu{Ffm%vx4^5X7a2y~HC1o4)}tGJelkbR6_{)K3(!lK-LzlNniX zSpW8P))R~VwOhz@VVW=#XW6ElgBuXFKyd5KZheJ;-1O544)Jfco(w2(O8~0;=G;PV zmynmBa=gtm^HCaA7}7ZC^(SeiecrMq|;Jh^Lv`Yy)Oon74o;+(h~|NxE%I3Y^X!|V7d?JfV(sMEkf^@Nsi)S{8|5q zrI5DXkenHFkpnZwI^_dZp*rj-YI27}<0ojOZoOGi`QrX`@W*9&YUQpZ&OdhUYto)x zL(PD0KdL%jvOQGFm&U^IEpn!(`C`-tIlM6p6Ec6*3=L1;m+)Gf% zOx`4iK)phy%lRNn(dLP0|#r|6}!i-%_>J~YAnIG8ICHe>Dt=nY? 
zNl&2Oo~86bmsPZ;8t%W9y*O-{y*tpKxIpb6&3e%B zu9kpzvKAn#9frDjlf!83J|5BpOw`6cyH2-(4!0&QyBdFHd-`f8>%g`1K0JP$E-v*7BWj;ECRr{(^nr~g>53ePY=h=`Y=klOFYRr>NguP6#=Ef5D82KruTra|77k;|<{T=FXUvkm z+}O@OH8qQTvyWS1*u#(fDo9*TeOnA&`}ZQloV#BR{s7u!x-P0h{WZqXw#>?nmWJ~3 zm-r^Y2t*`#ki%IhW)R?`9VVxfk9(?{U+umSwz zrl83|L+>y*gjvzmLD&z_2o8V(vQKYsFCR|`*~(zNuCd=Yoxxp z+aEhX@DuF0|4lGgwwmXtQ5UrOAgZ<|SR-{;7xDB&Eo_9)OlTaM|6pMU(C_%LcGXi> zGse;9kzHbI$AQiV``-EigH3p0QrwaOiGj7Y@WA*wK2B67GjM|y+dKFj3=wzeT!*6L zH8DIcs*vf3{`Zz%#lg zu#T|yKebmKoQwZoPI@wk7b~QJsGje&hN|#{_SgBi=%V5L*=ZL_Bw~taPIB(F-p-xs zw582Gh&FblyuK7t5`B#c?s)fA&?y8B1j$)@SieJ=;SgnYxQrKMixv2?xrblhln3^G zmqI^RHn({ge}ZsBEGLu$Axl3R(z(jWboONHFHE1YHquK;j5CvB^`i@iF)NcKdL337 zV0xfmz06?rCU`+g4P{E{Od%aBoSGAoDSiB_QVJM)3b=g z*8tz#6<0mMAGPa|$;S&?#{IDshj0f=M;f)AET*UfjV;X}B;`71&TQFU<)cH;S6vFg@sAQi{@%a6- zM2M-yDN##{FwNkT6}p66hfigBJZ^{xQKf}4W@cfB3$^L~?*VsY)y!(5l_jXIeSm(< z0A%NbXw9)}q24)eqJm6{BhVD!@hn9jWtvZ_47?M^r7J$NpAY4#sz+7K>4QaIZAyYmvB5?<6^w)qY3+(3TE} zIoCF@WRNW~vl1)gCwcF|)i{xK!@6cQh{dx{cdRa9HT$;~TR-#hpK3(=&{D4G^Kl-z zyGmA73WX`5^bf!%kaN2RN$t{J)vn~dLI(c#Tt{{7^Q2e~oAUQUDoJS>^C@7h(e`I=U-w&tiu;VFfOl(RL)113Il){KCx0ShS zEAxL!Cw91u7TDK?g?soq#+k35w@`S^?Agbcy?2PZV&scJOI#t(i<5Jj;e3X1c?XME zNb8vszaj{aJ&krOjBZ4)ZVLqn67`vNalpT7;Fm$@2`@0yUYn+O2tQ>=kw`@$2+=Q56NpG84NkJVW-U81@2_ zDkT2q`=>iLdmM&(!_#H>MNk0XZIimtr|&~QIFDA7u(oF7db&()#8W+sv1wz$OFzr>}JOPN{^5~1mHjZ(rR5YMKzv8L$zvwvo}*6p8}23g{N zLU)T}BI1VNPpgVG40G(Gngn?06RRrIbBs?Ub@ntEwLNcn60O!v1sMAV~LWo`=a2&2L?1yO7W%Dxim+RKlda_@ZY zTjY1Shhg4G*f3wme%p@=Td*SL;Bye!9L5}h_LUzaIMF0DBaTOPslI~8r>uVbw z&5c&=j+kR_D#6%#V_WABbT9Dd7i1)PrqArm`eT>%>! ztjQnpGIPRHKfZEFoG$Y~>5TgvS&FEQs|I$IZL zTJ$WBqj*M`DA4O#@myaIfvJG%cX>Rps!s^RaB2>T4py0bFx*;6LBlKE%`z5wIRK6! 
zRzRE}U&L&0Bmv$TU$`QGKjw4g;wKn$3DB^{)Li;|FpvCsVq5%SGiZ$XIXk5{8Q%jv z>hOBt-BOEt(mnhuU*NT$XPB9?RccDv$_3oolkm4P=UieeWQTzhjxR!*<%tksyQm^{ zl}feVqj#ZMv!x~lm9~kB_{oMHb;#aHanrg5x`F%RbcWc+M{G+GGFp;&=s$XH@E5PR zS-{(CjxGqSO9ZQt$(EM55%IB*=Sgjcb#mo1MI0G-?UhzIAKbT+!2L_yodkG{)kvG8 zDy=p_QwIEpOQ-8LYSM5SORxpYA30#UCOY)q2Vf$Hu)wE&rk}Ml679a3jRZcz+=!pg zS8k}E#sEiOCtlsMOhz|NJLp^|K{kLCN*9d0$03`(31xfBnJ#lcwMtHSAYH6fMy6p9 z(~`>|Kk}~2RC&-yw~m{%!6mxLsozqFla#h7|r@@KlLlNhJ>ah!5$Z?~8 zWFb*Fa-pC+PGEgo+8S>6A^lD?VKg#IcUy!@+*t-+zeB>8KP74jOu-C{8wTogQs)#o zkc<{E8-BZ1sH;ojJ9QZXkdgwS9^cv3QepkI{rA|=-OFL=h)ZyVbjO*YBThay z2vbzC@A9#|8ZGiiE5PN?Je>@is`afTF27bASR;rz;^yfce2>2+&gH%M)3}y~6X6#| zXGK)%mbut%9jh^}vVsiCfG(>8S8ky-p$xb&+&9$b)Cfp)%FayrPdiB&c0c73$=7nq z#z57vMu7QmoAQHBL9z`MlF-lPm|z@>81p!BbzfU2Fd7p6E*n(#3P&YaJesY{j{rFYD^hU-S>$`>&kI|WUiit5&4|hukPAiqRFmo!X zT5lzi5EGs0B)XABN&kxqPbZ_}&<1R$0cLC8M}@7ne26)5e)(GWeDH&6N^fAjSqb{x ztJ-H_Q=!RTlVRk`6%A!7UH!Gk2zA!5J{Y9KoEE7!5w0w(1T!RKG0x|Jf~fhpTpBKY zO?WNQpuOn#>7Noh%ob~rrAHd-7DX8Oa&pQa?w1?-0Kw;wfT)hZuHs=|#z#aY*H6xa+x(^in%~UCjhn{8Ug* z6CBo98C5FpwF=Uh$5EjENg<`ioxhFt%nduB=At0XoHnvE#ELr1hxZ;RYk1$09-ae? z+OLYNht0V)FbuSj+C6uF?l2_i>Nbd)n@6c1A*CRn{S5yE-u+xAQErqJJ+{o8OuwRC^ zzPtPBSJT6R`m3F6zjFk)iFHC+=#2IU!ykCwpZ4pawCG7oZalT)g4GKUnGsX3vdF~4 zzyGT*+gY`oXrrYJ&R<#?7BV5a_j4W)G8~K#MxT6^JkYTh<Nx<4kC3>5`@Wme*& zMAhH~x(P*7fnywarj60l>WBh#$^1`=Ll`e8;}7)&B9Y03q=J zqHcMMJ-}HNTQNk{d<&j3^f%-m0rexjC3xmDHSQ@58Fuk{6@ z!3@EGde86fddRrCDjCej*jk-@M0Wh2Htz?Q$!gwqnAR&8G5-?q%g4PqCl3)F{=+wWFj zA^D#34K2;$(Xo(LvJlCX!h=I4U?C;}^G|W_agS|Ogi;e-nkd^*FyO+6J$Ieb^ugL> zgX2~Ol)k>%Rk?IxLOoYa-4XJ*#LC^KU<2Kh)&ibEl*ymfw;P{T1p5o&nVBf`KRip? 
z&}?&H$GGZ1y+3-mtmr{f?DUvySKdbCOYimc&vdhn)>uJ$_vSPfz^M=2bN9a449}HA zdrIs|YBswtDV>u#2ZH{z;>+4}5wk&DAY7=@k2rk!d6n(%u}Rpf%W4W_Y#Xi92gbBt zk574`j;>12$vT|8UfX_WW;3NwdpRHBnTW7gKFGV*pF(b_FkV*ig@sxk{J6PeO}Iwt zOx$_)!i==12JR!86`l9IFR_}}$-ZE-BG9nlsB8E4-yUvEoXrOhiAWpP^e)%GLwFMH z2b>t1c~6R6O!U>5HVhJ0m|pZQbXQWO2kQ6#CgZO#HA+&n zUo^D0HnpbTX%JK2I43eIe1otrj79Pwi5d@rZAGv{JnBW!JE3AlU+8NjcU3Hhd(7o7 zT}mTlfqcMY`^O|JoSbAfw%lPudyfj<5Ad68v2whETwW)QDvOwKtB#G(B>q6!?GYLb zBQ!mIr?r!RO0)-Vu!e-5SDY2)xwWFV*PoT!B_}7t+r@NP3LpA-Ak&G_;Z9rg!3Ixp`_{X!ycu#2Nx|qjS z25(iL80bB+NCZoZy)#21L|KXX4B=OhiGt&Kq+S(LW{1Pu?_q_zkD8opua6sRYqnw_u}7G+bN4`T?)+tV6oRM2E!+({RFysCHT_JL^F+6$rjN2=yvI( zF2#KE6Y1nH_7z}$C8^0zc|T*hI8LMMJK!xy;GPrx+k}}i>~jroUv3>0%Oh>pfF8GZ z42$%Jw-N~26SaFvQYXRFfAA9x)w^eD9Tn%u4ITHOk_Exf6G14f_^;}=_{GebLHWle zbzo>;m!|yVUOR1%_!>3cEAi1mb_Fdpe)yMgk~0Yr0*7k1IPS{^blGY7- z;^T32!lP~YF?QE+HiWu5Ek#7CT~2g%P2l8ZIQS@Lc_xGg)|+^ymxyZ%v*SEqFI%Hi zS$ndQV&I48=5Doz4~kIUwq-Tq3ny{w6*n>x9S(t2hu)hK+>2Qa-_B$8*5 zP&>QBkcedJHx)Rs#KZ?-b&fk9#20Jift!c2QRu6db3p?FSWEOE-ki_v?n^o5(%Xu< z7h=CJUDRi?Ha-5jCRjZ>9$&i7sB3*YQsvzu`UMPBgFJ!J)V^9>qYqk3{FEpw%EIqd z<-i!k``CkI4IyV4jz$7B{j|80BV?!1omJqpwC}A29A#DfOV3>R2QaChjD)J&s*P68nB~pb7NlAALIl-5(%S1f>x34)1@o)w!%0;$1*WM4?FMN-Hd z2R}`I^gEGzumNHflA!rijKq_{*$bsdLB+rO5*@e|%urSqlAPPP)`(0F3dEq|{Cxc* z2(fCGL@SpYIQ^>km#3FgyynM<{Vi?w;6Ha-A8E_ZAz=SU{`5z7q-&9v|Chn0rHp@v zJ|$EDX+!_nJB>u<0!OlSEg2l1pV#V~YWkP>=N4E$e1j-38l19sIzB(d6Bhl?yYfrT z1rFLi?)lW^*YHM!CG&U57jQ?UPaD(bnk4=5>vA$U;Kb%Q!S>f3h(CZ!Va=!KmbO z)#7*7u}QZu*__Vn;U6#-{^C>^laIE#2Wkt987XfQ>z#4{wGbRyD7e3*J#Z^lt3_Eo zSq_}0@m(~T94!2PnHL2;+f~!J(SCRHy?cvrKZ+n9;#Tj8ud?MC?cW9xH^sXxAQih? 
zIeQgU)mJd*go5!y?IjQ@|B&Ep=73AI@d9c_fhVZ9^n6M8ucMtr7&so7j*y=wD zpc`*Q+!Slz6aFkZbyalOzv08N_m@b%vYiK3$^JknsTqfuA_V<>uh)%CAGdRvz%)C#p{;oCxch(M*|8}NN{y1>7H z(wRRuPF~i*_x?#g*F{oxTpbJlDe>hRbw$N%`*vq@7R7`&41#jbb5w;P+@D&T0lUjP zOxCWjt?!AaL=GO@9oxU$pK6kb!*Ia-)^j`v9vvd=?$4VcY2@3R;5zQ75Uc+O8X%fHj!nF*#6 z#o<9bD;V_IYj$@TvCtLJO#%(dhH)_>VpEQBihjlFYT@3qO~;~;k_F)HCdJxvNA$N8 zA*sA{m5NoH_;oP@8e*NqnA?l}h`%1k$RduXsZV=mY5TmmGv~K~3vWgZTT1^q!NqzPyaYC!PF)kRaA00NO0^1EMeI7yN-v6+CA{jwB?KqD z4Q(6Y_&)jg!0({4+I)6QEXOnrb_+8Veyy{)b)-uzoNs%u;a4F)E$7(;VLfVpBtvP> zVRa}a0rHhZQplU=Wp!y=mf0$#^@%)Io;PtyV3U!rI3OM{EaxMtl0omI14m-Si0(-+ zE9<7v{gavW`7Z6>wigQ*Aqx49hh|k3)AHrzvx$MZ?|W&%9C%gGxq-REeqq2Bu6r~0 z9(&5CPsW{fdiqY;Uh`XKzGkSh`NyS{^V*-qWeamI@0$`JTvq$;!_Qe<{4+m;yitA+j#m)$Zjli z{V+(IVyy?3;r`)vb8EL%pGR&)ZXhC!JdP_oJ*_l+@Q?HhW}5gbzaIvC1K4g2R&UI* zt<~H;vhnGtRuuUVR$D5w!4s&a6*X(FaQUS1T)?XdU)92^DY^CBS%qFvKsUUC&p!X% zCIn>{^6zc%FvTCOmma&waUy((<}`#^G0fLl#YJY8;05~yE|wl*5uC%#IIxFZ=L^;* zisH`|6neJjUL`sHI&a1q*njh83-64_I)&~sz0Jj9`ll)$#z+5(RHUWng064U zKSI-5@Gefl*rsL^#&#c95*bR}>KJG%oaGCh6wtKRX5OF-c?=f8?mdg!#Mo4aFDF%G z=xz+9y)IZdd~3x!PSYy=WQ;TNI5_>iWPxT{#g7*0xhbV6JDxi2NtE&qEh4_e-icz| zdEuVKKT?u)!~6OX(>(n?s2nPWhH?(=7CPkxAc?=RR(Kt0i;0klI&y(rS|2TDl-S{{ zz3NcTapT;mhxrgYF6D*@G#K$j->0la7gg=-@^F%e|$MwD+kKW|3Y{}yqelW~AjFt*@bj8zffqSIQ z->;qpAcCX+1^6>0Dvng6pBeu|%ayJ8$klm;Pw^DPcb;`psgSB|M}IAi-EeUfb_djgfChb+hk91YG;+E{ zDYbF#;MB}v8=}WpN_;WEf{%rCztq1d8AVCTUQU48c2x&6P7t0^RgFVD+-}+TiNpUuKR*|S4 zZa>I{olrWJsroH){esC{@W*Zte-C1Fmai4_9`~XQ;OA)QBVX7rPCM8iD-;tKd+2P> zVtqmXsUOIA(JPj4I-skCbDM&E(kS#XXSotxj>MwyPk4fdTU}>{?*w~RpY}*2$B+#_ zZ*`p3S9mAx?(J~*;uLTqV_{(F3*Rckj@DR4W_7 zn9P&;1>QiZMZG2!W|%BD1ejpnc};yp-4CV|CHt3ExTf|y-?hkj)doEr_S7$wpHm*M z`zW7z1A3dp51Ev(wl8HWoAcup_QbG94Qi{puh?E_6|*n$L>jCqrjiz`Ls`|AJKA7n zqpYK+e*~Y%(2!q!UQ%yX>>)TyM28lNj`B>>6EOX^zB}R;y>2Hah{4;O~HX_!@H_u8K@>WDmb>PU@-@l`k5X&U>v$?DQuwim_5r7mWA(RnyM#telib zkuBa;&w6MY`bSyIcM0)lO#VaT)00jm&y;^bw-k*JSQ*g1pALT!wC#HjqTNF~hgnoVFnhw*1pf|RNccCid4^5$4Y;Lr3>MDqi7T@qjrv~eY 
z&mmFSF|E}RGf9|8K{_uGBEhgeLy>HTzE`@Wcn(~_wJL+R!H=aQIOHhFu8 z1y{twqe5-A?9=%96is*+3yTkPF1rhBkE5Qd=%50zUL44uNz^X=>ImNDbP~f#YP;T&k98FHD+uw2+`kgle<@`HA3ZiFrVu- z{YWI5EHzniQdBK-i*YiJF($LGo5Ua!tCWQ<^k@h#KsXv-5=C_S1;P{VXBp;Xyd-yB zU>b{V0NpTf_V)9b|JF`5whmj2deuo_?LM~Bs+>;mdm(jgG`jnv($v>9&Han*nF81^ zt&m5^4^NmbAaf^vU57tdcpK@F0XEQJAU(|>TxM3q>;paKGtZMAlcuuvy+tWbC<8bQ{ngKFIJoE_Z6Nu2`fnJOif)~=)RB*O0 zbXReWNpn#|#$N2~U@!K4pVea9s>83z4{)>o9zJiTB{9#Y(Ab`hS*_Q`|3GR_FU{A5%+8GN%)fJ7ji? ziBRrV-S*7)WY0*7F(-NpEj#L(L-yzMx^;e zWy%ysP1<4qL8o9`TQ;t{uYfq!Sq|om_B0J)9_g4hPDD^8Vw@e6&f3QHA28l~oLCxW z7)Etx+iScyYWmAmjFgmhjCIoeoUU(uz&+USVkMpx`{Z@(dD7y6^Wi(o2uExC2~JJyVyr6zAdBGpIjWTC1os* zK|sE`Jh|JYKMWqaX#dn>KKpKBzGrKh!V4YB+1}6oVh^pf!3KpF0btt0x#!I+4kP25 z=i5L8lmIN-gDOmYy-&!%d`Pd2>QHQiVATUzokr74>kY$aHS5cZhr~noSkG^;cnRsv z3bZ=13*(@bmYmnC|E_$o3^5;A-#9O!q=qg!7|acE)Hfo^n7o?#M{OoV>oD1@TbL9J z2TQgRY`%cdsF-F2e@B{!9`0l4OZ8Ryb)^E4)uB~Loddm+9+Y~)n`vx@)^rUzk_Vzn z8U{QB-O08`Y&sGTg@m*H_BNyM+6-_FhK+S$V1`ia*I&Z-EHink7&DlHhjgFNvScCd zPdeM|ii!65K?Bc-7RARoTZ6GGyy&Vx#-B8C!~cy&c>?c?e!xbt66_4(+|iil-W`g4 zld_54&2qOf+C4v>!`H89hJm3ei9x+|gr~>Duy1+c0SHB+|FV+%R(Z$WGxVFizKMI5 z&IqzHmyCb8nhKn}9?>cF*-<7qc38?PiCL#azP?Xk+1@ULu4#Kz&OU_Ud6V>Sh~QL3 zc6GbDtusm3+wUBw`Rx}MJDd2ocUV)8f8;ojeeP}3z`VV`CBtPN%%m7l%TcFg~ zN7o+D^#1>KR4Ua@IdTc>q;kqF$+fXleM_aRQtm7{$Ssy=Zu?Y<5UU)Q#8!!s%P9BD zR&Gnu+~>|r7IWFyW@cNz_xb(TBkZy5{dvEx&x>nc_aI`tmI}ExqSUTSZtuXy$-hBR#PCY z!0h@EED{lQRNE{hk;E^KLmny>A1Zq#^LR@q<2H*gpXg|}NtuQtTbO{myFPTXuExR~ zd22GIfoPA1zRXaPDtK|F$D;yBiA(F&*Vhp$6TYow^bJoG_Ju2`(&g@)iOx`+cKUX= zpjq+C`@_0F1t?caPDX%k2|a@+zrS_r$79mj4{y%apLZXs5?nB=HtzmOt{R5>Vb!C; z>r>QF+l)TXj`RxnbdzO<`iJ?RB7LcJ3kGz3XbTlsD+7 zkSZ`|m)fBs=ee$*Yj`#ZgWEkq?NZGC`jYqP+mF~Z&_pCTWzrNLw;px@^i*E9@0I8pm~C>%XnXA923uK=kj^j}a|Q zWJ3ZeUyyvP!LKbx!UV^6%^ z=S5=VLFdv7)lZ#zJ~Fa#Jt_>D3~58$ONL46_y;(14^jB7V7(wj@Ea+fURP&^rS=#X z&3=Moq@^1)mQjHWelT+>X*C^m_SGv}b6LoX_zwOO-UW#7QC>DE<=m8bd|Syn&M52r zfGARnYjzJ11RC~Z3}HSCfp&W?mAK5{d|A<W$?2Z4xG}^L&=Nr)Kh1^^IPEn;Hm@}*2zMPCKO>7uoVvTDIx8kE=`a4hAu>z>48mqR{b>d9 
zFK)}tZ&?K_M$IQ;N1ln>hdh<5IngHXsvCRJkbvsNTl;bSu;stySljE>uK5%X0%Sd2 zEH1v+4YZ}%B(NCCUKcKj2aFP;z<3FXM@s$K6(?S_ep%oHq&C~#=)vh-_zd#6)C$OF ztOyEdFvsWjMtr9mgMLQ0`v3m9@2=-tSF5G|S+QH^GEh#-7SjJ4xe+S^PRA#QY@70KYJ=>GId0PBFW2OW1@bT?hoTNs<*OSR&6MBhEK{;DVd5_tvK z)~8MVY!I-zhz#ONUnKnT@!XiOT`77?^i#%be63~+cydHCWstUrhfYq+61#*F=>)S= znpcJR7SXr@DyMjIb6@pSY2yo&}3%C0*u z2s{!Fh>HRk%jNvEY+cRbkY(7AG;%srYW!ftq~Jgk8;-g?xC+GSocCm0ro&%pg-el0 z@`VBNtXuppS%>XhKXe~58o0`i9;YHX{G*jR4XS?`!3qEzOe_;T57O(w5SNh|!TxjJ zPcGJY{4Lg!VmS-^nGe)3*rEb;O&kMhFbeIJuT};_9`U!;KL`QvAEWWDU|f-jujrwop> z^~t2>ka49GBxgqZ%68H&Y2#x+y;K3_KOIU*z?dlKEnP$2!X{kE1MTtw|{_qehB70_U=d&Q#jkQ zTBcq!RybXopwCdXWyhjD7&rsq;-h84Q|@DYbn}mVOeY*YY9Y6wYx{-=z3>Yw9Z^!# zzoxPdlU9IMB{0B2w$;>IU2|_ZO8diEk1d5~H4e$%PTpxS z0F2omncF;?H$4E>Q<tP6?13$}PU<$E4W`EBv1jePIxN%?0sKhWy34zxo2XXvr|*|Y+e zR#;MH2YP|JYhkHiH*8SuyV2X}ov6T+tvVA1iyHzb7q1YkfZ51SXuA?E5@^35Zn8#w zck{k`O4INy#Y7MjAAth2Lz=OhTXz!^scVwZdPa94&@yVskhQc6Bid;^<^NG=yDQz> zvEvwNI)~>}BoL1uS>jlZ8D`DLC+B72mV@c?T)^|?NG^pA1zeY(>s%CWM^X?w24R6~YtC&sCGk8{emkQ%oRke;Nz zO5myNd)6?OyM(iHes_jkxj#JrBk` zi=a#Pf70-mu+@b}Eg#&khcX~AJ)JU3p!^$X6F^^!{(0Zn#!Z2q z{FZ-XYtMPH zReByQ!&~@dTan{rvVGh5;jRjfyB1KswD(sR67&3U8ofcdp#p3w`b%4;f(Bn8r!~alZa9IC7OoUEP8-Fe-ePCCESU5CbJ%; zBKk1UY%ik59wL6C&s~le>nNd7%y)cEGX;Vv@S^Xn<}?6$G>N;WjQ^yZ}u!#`=>Tc&)qwrWn}Iy zw7f9Ok#zv+3Ef866Wn%Ln|<`Ihrt?>3xo+bQ6U*7$~yKtXgC#Sr!}x>8-arS0eKqk ztbLnNlXsTti0XSmDcFc_o6-a zLJBw-JOR>!5F4br67eT2DHGFnLh3oC(LBP^r3&2+?Gd?s)N2}xaaBlXhKWm6c69q! 
z+)3jZDm}>%Qa5`$-An6 zXK`6i<;58K7j#vi{^rNrDPXdgPy(PAm+Ayao}O@)frs*KIKt}5ZlOhV4JrpbGMXoF zRu~46gHA_Qksg!xi^9Xy99K=P50_QbrH`e^4fqns7JH(l!r0E3S*fdIvxi%FsDE5{ zYf@85PJd|@|0kBBJ6&1GMgDOmUb;;NYV=2B4qBY_xD~!Za)fa^S5(XVi_$UOTHufL zty@J7Wsa=fF&C!@Mr8ez!x`th7AjO(BN(#VQVk9D;cmP{3S+b(;CfXAOK$SStMx&X zG5HPk+mrW&C}_)P-{o9G`r(yqrl7>Sd7%YZfq&`^vO`@mpM;l&wj=XHr5`mOnvb7g zj@|^C;P=1eCH$K<4~%2IoE#R%mIP@Bf63iwcaj~>P_aG2r9Lg&)e1GG2V{8ya|^Me zlUwgrKmy^RR}TZh0Tr-R^*rnQ3AMendg|5K&2wK*GC6k=r=gCSyiY>7p)l?w;s(rr z-D3Vyama%SII5YrGRnx7sB`CWYFwWzM!Z?%haLXY*}m#eSj(zfBtosp+{#4L?%!cn z5S{Ln5Le4sFzhvn7(uc|| zOauwo9fMx1@gOeNTz@>!QQMETWqun!ToZ46db`ewRDr}w6zBerdytTvFMS%D6b+9TD`IBA_$4aCRPK#S1sK zCTB83cFlHBx0!^7P89mhE*bjQ}S3-9n11)Ppz~v(kVMDSNIRUUqQT|2f?FU45|h)^e0P z{Y=$>H>LM($Kn0eD-|m;d;!$KCM_?~UXybhR08w|lBJ}-R^ssDdsY~LHC{B%T1x_5 zodeZgs@oqZxl-td+uN_V;0gRHtzD zcg%mS?iJEr25C`qqJ#3VugCoEJ$p&*MgHg$(n)qK@CvP=E;=op=;o}1EZf?=SNN#~ zfdw*t(wv;s8lrj8dwYz=Zl_4GuKy^Re5}r4eIcf9fay?;@Yq_AY*h zdgix+lg04&#rEou?c~U#V2v-i#nXAxa)@(f*)z6~uc-jzbwp=QpE&io_WA|AWig5b9~SF>V~q; zrb|{YF5P-5>E)NcNyXrQyH1t_S4Bs5lJhFWJ zIRs-#Z>2az1}${~(biA2*(DoEC5SxZmk>}<9KYyVA4j63U`K5jrFepd_84r@?e_YM@^Egy@ls;hrUNj#o zs}+zHeHLf@-U4CRGv6mx*0Ppu;D%jnG2|jd#_`78FUgXJ88a+0ssoPZlI0P%&A0Ve z3C}N8zM@VJuz!wg^vz5am*S<5p*j7O9`l9*Aj#K-kwFH!`8@_RI;%V!n^6shbMYkQ zjL<&@a@#Z1q~OPN|G|&c_;utwak)=Jybi!|YhuQXsPIT%qL zv1B{!_3{SsYkH~*JPz8aFuNAnXJ-1LLsk{cnPrd|DRH&+7Qr{4*-ndr5+Iu}!Dy_? 
zShqW8yWz#L6JYP252bIyiV`9FiZ}VQjpK|p$TjE6zeSr9`~b9ml!YX((U?GSva0vn z)rh9xx2wP8z?Z}6>v3`(o}IQ`e~JGk=PH3nm7ad9muUya18Z3L(76bUg^}4F3a;`z zOWFV5k%a&c>AX;iihO#DDg9UHf8z~VO6&Q;X|Ltg1w5}rwW8KL_B)*|i@Xu`K70># z#4 z6n%-H#0UNGSUV=8#Gn!cz4Y7?9wL6vJ{u8kuC%zT5rR>RZIxT9A!jgUPof0ej z8i-Scxhro|zq829*%1k#SxnewF;LR!F)*)j`|lRhOG5A19)GI@pruGOtSOg1sWP*OYXo)?6dD*nK)5)N>r2QEI#`Yo@PRcPU!Pmh%us{LUnl&n=`93R-^J(!Kdr9rk2`;q6-PfMd)0S z?5n21N_+aQwpZX27z!bFpKqc_X)2=$Qj#3$qka@{f=!!#tmFsm`H7>w-_DC`r?W;h z*om@qrj|tqHa!Qi!=(t8T`BM2K*rybe!SBn!w&`K2hG45qu|7KQYe91TB z_Q}ecYC7`Dl1TQpQE%zdozWQ*;|~YLW9JVhIiOuG$QJbvlv1g+D}S_pswytfcK$VYF>jC4=Im^T}UzhgEJ`<59mrcisbcDLq+g~ zSFd?Sn#L3bhaI1gQCB2X@%VG56)s3!Xptx7Tg8{tgIAteVFpYi|;| z;v}+53gZw-%3Xu*r*3KmTW4wzo2vz6c5%nfqVA$ol`8XTkw~Q#_#?T;WbfN@~U1ap>Nx72}2-{(?ECpS#p^x$Ve8 zwf^q&hsH)Ow408-aP+f|jQ)_@ZncLxgG&?z07-veZauR)0fFYBuf|{Ky1O^p4eC0z zS}pW=o|_6D>zKfp7>l{RYkty?P@tg-uHOSmyz*~HZZx8+bdmO;LG?DW`*_sphP|9A z)un-)5T`T^NrQYwtF!=z0LvE@3CjAVXH*D6?X}yhAVoW!-&VjQknsGfg&rm)H#;@^ z`SR%&X#Y7Hn^wlx)*u9kV?QBt2hK1T_g8E9-pxy9Z6AR}Yr%J?HgPF1rT78!a~uHS zuPLaXD#tr9d2sutVuXi*6L|_!G3TtKcO^3CUy#e%%Y>i+b_y6d-pYi=8z_YWa)}?f z2d<4hn5QW=n(xoiU+H*kW%pXu$#e!+PGJaJEZ7q3+-$}oQL;Njw$(Xo-7!Gy*h+t@ zTZp|kc{a6B-ZwzMuvY*}u#Ar}wi&~VUTl|)?^7sPv)N3=u?&tfDZeo#kP^nhFaGO? zZyUo}ro4lTucZyZ6(8HtcO|w|`tP!J_3uGyblmyo;GOuR8S2Oo6x#z}_a%K!)a;tp z1ZW{Wz?lCg2U&qihv!;OQ3?=iR~+-hdD1z zEDDyUac|<2Rc@U;RGbxg9MoGi)h@z2PiUp{&%szuLKl=??xen;##5;Z!A_IKG? 
zUHsFG3-CHW>+;{^kAW=ncl13?%tQfSSq5&A5=9>y{K;B%AdI|wteNz49RATvJ?EQ* z`#Lm*q(cH22padn`o(}cSc1%2;=nU}Ts+gmH~by@70}ju@2=m6D)fqv8Kv^sU6acv z$pV@%cxKf0v>t0*tJvWMR_K8ZH8XH%EWld>rKV+z==02g6__^5N*!jRR-tEzL zDr#5y&A+DkMK|BCD+r2F{TaX{E6Z-gd5HO<4uC?_$x#G(Z-1|hPUP~^#GrsJi4;55 z9>{ma`p_iN5b=k28B12fuU=t+PD}utn#&)9|wkDE&0qMBpds?Gq?+Gr*n z=BrLA2!Wo_nX%|Ta}e+HKA#*fQ|W%)9c{*pLVa{`DNw852hoJ@LPATF+V`*OcZ2u#5+rQ3FSIvnuUsjw+RT>6Pn(J1!yi9MVnnktbBkQ( zx~xTcqZzHEC!T`@^F4rmwzBZn_`M<2;#up)dig#aP>POfZa``%Tc#BN(~3QP_N^!K z#)oe$E(Of8YsKR`k88MQwJ@rsn_RtPOo0p9y{yLruRJx54$A=CScV`(tur1(U+iHr zihbg`T1|$K@vuIbIq=XDv!f#>}Fs=e|p>`-c^*!@bvuoM6s!Ez=U&+5?q#9GU!=oL`o zw?@-=r%kI>2#!l{ka&_6MD`GLe#Q>jXq7z!vj}G1``coVxslrhHXBP0cjM+sAv9*W z@rQ)L>oh3u6Z0lk5IB#tWy6;Cz_vF>%jc0&-;rv&^YTBflX%{JJ+z4Jr*5HoscSN~ z>994!Rwd`X#PM(5nP7JC`DfHU&Qmc%sPNk)H0+_{?H#Qb-mghpk3xoB%g{Ed_thD^Y zTlNp{zy8^;eKnpXNxuH}?UkA3TLl9@G6Yaf*V`hRp@=8CgKVCqyHw7c{yFgyyookH zT2Od3Kjo|O)#QgHmiemiENhEx~J|@@q(va+0{JRP+?pk1H;}ggEl?mmIdL z-r%Q1rwycza@ik0%$@#Gg^_)n^L3Ttnx@Jxta&&lGPb0iJp`FgQlQA+6$hcgn6R8DNxRA^EmD&thX9!Dzst)h9>y(men#FHcjY?d!@Hx6ea7Cq6H%Um!<_l9Zi zb*p^%OYTyC3635WfcaAI7N(xIuyy&cn;o8D`jS!p5CsoPSQ^;-0h@H*}-wxZi`3#O)!*-Wt7G7`)f6=XHr=lI< zqhfsF`(QR*F|mc_onSMC+bTV1zJ845Mpnq?h3hET?tKL~l?|pkfRW^l&DICTHtEVf z*y5{SM?>(tDJ4M7Jc?OXSe3#1ToAB%=#aCwL;p z_6bD+S)7QhfkdWzg-`k`p+^&{RBybB+m=O{YQP@c3GT5`x9XSom@t*EY{%Gh3i9hp z{~K5}Jqt}AtJT?LBM5sy{Fi%d8{t!-s(jZ@89Q}=+F>u;BmGc0F$7~lv6Q1CA1}pO zN1F?by9-yR=hPv|e#_aRJV?V{kh%^`r;~Jt@>7L7#&oIhBhS^cZNqF&Jvpy;Cn{X({`U(D`-K7o_ zl=Hb0F0HuF*y>hO%APWnRsZ}6nM=9qz6~ia>148(hkI%Csa_KxK_H59?j~*4zc9Db zdw3czEBh>|b8MpS1`Q=u9?NN3eKs^F6ao)usfvm=k$jOzuVH6N6r}Hw!Q@2fV@`fv z)Z~P!$DrSU*yGK6m!H|_L8$3u@eQzE@&MBqv}2Aa_1_=Zx})*yICuAh(fZ`L7Jj`0 zr=*JVMPhj0Iz?jJs_N3fPrpv=YBenK3_atW*fdpEB6aB~5wA5y68{1Kwos}+hmNOK z=^?lAHBk!_FvvGq0oCjttflq1qw@OUG^Ykg$Inj9_;Dpjr|@8#&q)t7O{AH2gz^SJ9pV` zgE4PHOCD3$?6ql(Po29_X{Jp@b5c``_+-w6Y)7W_--%{uo9qy7kYLc;qY1Hg79&h1 zuY~UG;DBhnq;uVw7oXvlF9wWe;8mX1#jzEh+Irl%`Ti>KsPd#o;>9z9n 
z`LZG&v;Y<|8>2+I>-(^$Mu4kCr9hOy?LGIQcR{dlp{H*+00&cJTb`5CRH7I^Z;J_7 zw)+}^Xn%O&EG9G%N4@woN+7WWmW5DCgZ5j|>gVylxZyR=-Tbr+xH2+x;lg3<6;_m$ zfAjjG%O^KZ7k-yxtM0o=&46Iw=rt&RNz_CPWX?k-iU zo$TX}x!m!YzOk?o7+e$3d!x6uab+hqWOA*q5PY5Of)Z0;N{^aHj3TrS0~I=>-Ki!w#I|HU@aX@d&^N(s2}SY!qz32is3u$xQU{g2FoJ+wzL@s6H$>$F8lS*;>Vuf!7r zIQ6?b=7u)!HEp`X1XfA1c4=W;tD(@%PIt$XmMN;4wFY><2y2t*GXQQ0OU4;;1=MEx zV;7GHhYEvAu<_S>kDma${JC8)QTl8oreCDsrgd&_-I`Q)906u68waI?u;5r1z-9vO z00^E>ENdO6B7GjM8+_!b>krmH(Lv8_q7bfom49jL#yH_?}c|~PDj0xK&@smESKqvJ!N>^JV zqHRk>cP746T0$cR{2a6M>JI>x3f?V2(_~Wbj>4Qb7Kw#vykKhC8 z)ASwv`z-<({GBiB)4w}Er=An9CYMt4(_CJI_bh;e79AqBp*D%4TGZ*yNOe(Mkdg!7 zkjzhaALp!J9Es~O&2h9nwHaB}raHM&Q+mu8-2<}3#9%L)x219b0U;tcWk<$wmK0Dy zLRJJxFd}^;eP{XLX*MAw1asY53Hl6zj}I}PT0z7^w~88n=co&Fi+9+3qU@y*We2R~ zD=lwr^sBp5BmZNgp|Cx$e7ov>Gn{9$)^FG(LbLqSR&UY8oja>^6$Jnc3ov_FH^X&% zY_`KLw2~muxT&b#69YN7dICW>U8Mvuc^H$?8{3SaKNeNTn%8q)R+&ue&b=gWV}=Y8 zOMD0299+t3y;`SUH0339yCk_`)hzIKah{Q%m5@X6goMV2HML#$hMyH&BO%Ze(%urI*N7s2#X`<(DhqrTOXsgtprvpzI1$DzO7yA zxZ`Q7&DO;8^(BlITD&vG*7iQ7h{;yOzG-#ROP<1H7SnYn=N?66z+mh4E6p&0%D&InCl|Ho zA2b-pzLFEa@toa4GTj`!1^-S63HnTEh=(n<)+{#A*;8(wU*f~9i5GZF=q!UXENj1{ zPuRy(u+2QenTJzu$C{^|0>M1#6~a_SG4@JRarKsu7cE;Sxyfg4mX)d=z zCYm8b&e4|{hia>Psoi&f$?ciV@~9@cv+~bvsQ%6T&j$O=+L)kVo=mSz@dr@1W zeR;H4i6JjhH(}$RP?X;rXKc#1yRU3iu=l5*84X6>kpIC&_vk*vQlHF*WOFUpioWJ) zosXI^1}cWlo;-}Wp?v3YqX?WAzB#ni;YEw(u44Yx+dTA{srasrHn<|zQMnCEpV!j; zaun|9oUapG?hFh~zV(+DX?AV4A>oy|Xy??Qr0{Mo@x4wAoHccq>4@{4I=R9pvyr!@* zTdC_PeU6b#4ImDEWLDTAoOwl`Ohfk<>?y~_+xCUYe;Ug{+J#4BV%yHkR!u|}>-V6~ zt~YqrZ-TgFCi+22BbR!)Zrw0NV5sb}5#T{sm)h)-b75<-0(MCImoFBrp)uN_GSFW> z320eUQ1TH)_&Cjfpy+6SDa&j?3dg;&JhcT+b^1fTOWvm(_8ibwO2oREs+pFnW}hIR ztP$@IAtm)od?#@_+==Ngk_-3Iec*m1OaZ_XJRfi0x5~BWM z??>d%jve=#y+UfkeLfUB$g80h!vS*aSS!;*XRfD(X=Hw@;ex1a(Dz{JZo3JGmJ^4_ zPG>)S-5AZx_{gnp>cQ%?jB8Z(ODv3#ekn{!-_G;wAH5cU zEuXKuV&8m4?evMS0gv*~_f2&L*NHo0&NY>P6(Ot-nLj=8VWzne|CnbRTjPsgrV6|gxJkQWXMjt` z!~*;pAe$PED8*rY2z!-OJ~}!^`bEi#CD%n3o;>!aEPM+Ti{4&he_cjzsI9K*i606! 
zFpxeOd1xu0%_f|Ib`$sT@hX1fX1xDd>-CtFsMrUgE?0p`Z1kDDxh$>PJdYjr52UY( zxb$4&gZV5cIu@`nglUux!;jrXsB-M(| zeEUP)Jm4@v^&@c={LdyP<)hvScJqiq`d;NgQ%%otxPB}1pjM#GKq-Bipxla0{;c7d zB?`-$1|6N&74di7$2A@6jw*?*%&X}6mmY_2QYsT&=gNke9>fJ(CDIi86SM;;4o|c? zb~g(-MUWyVvcGz$w(Si`erZH2UJ^}f)zYaS2zzp)mbh*Zuy6uZqg0LL9c|}xvQw-= z+jRM6!1FHD?2&Arf23Ra+AuQFT8ncP1jff+>aE zx^LSIp+<~z=W-Gjfn)RuN1@?%wv(1N%kIynuFA-3AurGB>bJZbEvpIi*{i9d>5`J! z@JGmCrpddUC7%W9;gsg?+@`y{qb_;|D#u&E=JKQynV7mf$fpmn+E`6!Rf*b^mB+2X z*S*p9Hwa_KJILh?dM+mCC5z)h}Q6Mkb7eQ))RYwj#XCcxTovN-8;e$2>MQPn4tEsr)!XZy8g{Smpmq194638`$(siWt}Ea zF;p>vTXk4LbMN)?U7I~lP|Ism=4_Uy+O{CVtY9dz8_Z51fF3nm@-22vuTM4 zBcyb%=9%yKb*sBoDrq0Pqj;zQBj-E98t`^?h4QYuYyS@B=$83_t-WzRr|cn;?ss~0W2MUK1S4Po>(4WPZa=B3Q`S)XU^hM8siGOF#2iMZO?&U><5U5k|yrdM$t zK``+edntuP0Fzoy*yNpS9lET8cF9NVDqVUmJhJbvj7ePxK3+Fu{HgQJX;^GwLYs-r zw)EtYdq4Tqj5Pe42H2Emcz5&hY_UvUatzWEEZ{>muB_4I^HG5?TJ;*o;~*2-c1!sC z!M;bfGpyBBE#eb6ic8mQ9LGnd&$v2A|BSHCm5=R`!O7+(>xNtYX>gAHTIe7yqCp>| z0YJ`6Slqky(JbJM-UVaND`<>lg4o?*-p2gG`6lkY@Rb7et&-ITiVOg2w4%{4K7;CL9tPy*Amsf91 z9Uj0VE6v|~hm|ec$Ek_+aWMes*CLBVP8OLtWRVu&zNCx)D`5h&_sA_ka7XxZKmmC% z#F9r>M#;9K-))lSN>GKWP#;#uMe9>UVk99djK&&`{w22_Dq0R!ll2i*ObVW&y?RZM zqB9HUKO=b;9^}Fb&8OL;j{QjP1;v07eTZe2Y&aZz0PIAq3hJg`3AUTl?wq6JVgkgz zyOh&JmprPpcL^d@Jv1KQ6TIT+T_E^y7ySIre>H0yPh8i9US~el@=A+Q^~&(_t5aos z!n*BmzE%3sEztVCI41i$YRQ#YMpcs?03Pzyu??qL(4M)YgFhp8pH?cmKHOYb3R`>37S*R5HK)HO)9i~|3r8=SoGbTz5ELfFbPfVZ z>hG}p2)1Ux91cIO?$B{;cr-Md=`H<8_^;0dsg9z1T%F{576ehS>G6V!EI}s-Jy7w$QY?s5(M@GKZ47~28 zbV~_AI9gxI@!tC;5H1shcA9XL=(J&r;gemWYPSkwEc6-8Gt%$rVYwjuOfBZNY^?g5 zr7(*=B+=uMn$Zb3E$&2<;nOVr)*KnJU{(xkh9y$=RLi%B6@+be&-48)(tnz^aHa`B>8vB0#?2~1Ua@L10Q zZb2iz_r5nW7jzJ$Kn##}FMHwtoxUR~f*e&-$JfdP9DO81b8Xh`$G7E$W4l%+chP~T z3rHz*B{fsy*1*I zjxvQng@U$vzlF zKSJ!XGTBe#h4qx((Jf-F3G#2XuWseN+Qr%hho(=2N;n^Sn8F2)GzMuea@=E_{uI3y z33=G;IWg2t%5_|qT6>TRMaWa?d80@1;{prGhZ8AC@mOS(1IdbjCxItxN4gz`=kx@x$RcAR3LD6}g>bK8rF3J|)MVrggw1$j{&L^rey zBv3ss<#)meWFw9J1u6Ro40YnrQxarefWlUtzk=_+;^Zf}f8(5eSGa6PUc3c7bgsmo 
z{<&3xrEy%W)wwfNRt#jvWykB~QwB^F$sb)@U5BM*Y+~~q5auk7I0TQMCC))47P>oL zvdczj9^blqLe{Hg(A_d@7H5!>HrX2%XjB})TJpc2Mw8e#8qp-zTAmHLi0Dd)lV76^ zCj(cLjT2<-BTXH1gFe6H#(8%xd4{}SLKolED<)4AKHheXZAn*h9K7)ps=NvP)OGZJ z)YO=+tCBRvcZ`8`!9+UH{0=65$n3Mry+n!oJJ&h1n#9K6;I;`Q0z2hLru3`G9Zh=i%cAR_LWsgh*eK%I!BVO#X@{AN&wKj_Yp& zWH!7VmQ8lKGd6zGclzjL$lq*DI}|9So_`^8m0IY3nCG^79HH^fMkL*7@hZQXzIUtO zCBi^yOAdF?%}g2_O(|RC2WfYOy`R##aQA(Y`@y&w@X(1VRpdwVP+25ql@>O^>|2CD=Qk@jI?nbgMPbu zct?A>B2_nHQq3kGyYyY*&FY*nEr9SxuR+j1s6$#gAxQKOJV_N1rD4=|tewj*>kk

D|O2xd>iEFnnnld5w+{Ay^MYc`^Qc^mgCIvD6MD%Qbx~lTn$E;H7u>YXWz>?d?G8 zj;E75V*#pne>G*Ra>KJ+VYPzaGq`@SW`TE%#=iWQBfY7ARTWF=ky;U6@u?je?kAp zma+Q$;cmmm#@)8A6VOuNh#|=i>Co)yzU?@23|e%QW`Jr7l~7T?_J&A?NL0JgX~>{* zE<;N{OV*=l>P%%#NZ3D(#8Tb)PJ7{HE-F^3voG5vlJUC80Gq6F+eFTx||_}y0D)lH04EyFyGlPl6a z?n{A=JMuwmB1gB_@Rn12@j-wTk z|In2Ke#vd7Ab}uf#uLfGN$Mk{LbSFt_Z}O%5t05Q4{=s9H~do82I#mBHW{+2w{Tav+jaG zdvU3<)KxjL)4CLF(2&E`&7PFX(!&RRRYyKR%B?R+yX`v;pMJ~}xo!4F0}-kKl`Kyn zw*L}6P(~ciEH+REMuY{-4to1(|l zFpd457}OgsUlqlAJ{S{;af@Lm96EMsdnRiFAW5{>81Q2EwQhiV0|g`9ew0y?KCMxc zXy3ASsGf}E%*5Q*MHFhl4sJ9TL?qghg$HS85R=Qge< zh87LZQQPXV>kGTVRW=X2W;}zCR<3cmGkCh)?1vcq6nLv^Y})30_Qcjq&$RbUp6)JE z`fALWKrpzb(L@r?C@811W}_m_=M4-M0hy;ZcV*Y4X|O|!OHRFN=N9XD z6Q~Vi>phixN{OK3NppK>7b7y4=x!VLK>a_Tk~K@y3=mgkH&$t-Xxl85wUcDWt8oQ@ zI$M4`H+^O;-!&*iji9Tz++<1?-MaMu9Kec_^`x$o<~2GCt(TdvV0TsqN0AjdU77A(y$s1%lzQJ;KJ)koYu z@Cnz~(|FAGb^;w0(N#-+$~>QRDess=`R=RKM;EAH2o>?cSIwLH4v-Ltc0=HHkH5p$ zdw5b{WKV{R*mmQKW;tz5;CPzW%3XFgr!Hf_Y0*U(ZL5`AiX4wgN-h8}-~(_NhL1Qn z9=#>0>6blb=pbqtNYongXYVu6`^CZ6PY9-?7VX7*3#BdJaUrD1Tb=Wlz0B8zGVqUpSw>aT4-qr=h}LePF4zrm%F~Lhx#9dIAe+0x^4jDj+M{=8nj&Z-W*#`?h}4Va7@*1Q~O^NW#qxy!fK|4`JKU zq3J;B2?pWo8&bFI8`AB2ci}0)nlxl}Qj&Ww9!mL}Q`qCTI?hDlwXfvSM4F#GS>d7eIEVYQGNhGHHch+4+wFVSvFG9+IO z26(01SNd`H9np0StKuaoMo;~4kMoAUJmBMc+o@K% z8l7<-GIa`nRy6z9FY~+TtT7dAl0(GKZC(j@CI0#VPLUuFJz?vf{NC?xi0VY>%g9J5 z+G`PT>}s^(TrulNBOvFFRuI*L3U2n_5CuPXK1g!g=9*)+v3?x3it_}@KyW&VBuH9mgE1Z-{7Ibyt|+*TJ_NbP?6bN?C{Vt$ZNx{#xRl3vk^Z=wu=> zQSmLQ%@UPkdQ_YJ3V95XuAK57EUj0Dq?Acp5zLV@B0p4#JvEXonV-i6VO)M-)t%ch zx{0jkoGe}xx{eo@HSlt2R@|8R+#WnKj%h5T?h}ub)BsEz^H53Kp@R}m4t$#^vL~oj z6UZvvtI`=zY8={S2=|%^uKs*)H&6zl0KfJ~pDw~IOMz07^FgA@qOb9Av_|URpC+&g z6y0Jg5aSo=37PPa58ywOfX{Uys$dY>48LO>-Wvq~c!m8;5lao9xuRrzw$-jCM6VGj41MMlKGTPHUYA06S&wbtixf1TPjW4$ECnZ%Pj zG98|-RJTupqtaa2ilR@wgKhi3)&XY7$j!j1{%!p)vpjZo>(Y8?#CoXD=}mTwAk6Xi zH0@{vhezE7H|X|XhO^TvWQlzWfzbs_uo~HzS-;RtG7W`0anWuHojF|An5SLe0cNt> z@D1SON(Kelu_m|wV5aqL;K>jBpgyYO4}B5hE9W9RjSRdV?(2 
zXDZ^0CdLd{=&+TN!y;;pt#cNm&!q3|nL}>7=?Tin>e>{AIz^f=8%ta7!Tug!>CrY4 zV-VPOXc`sw%G;a&SA=Z0)qa+CEX~(&o&Jm1c1Ho*B|Zg~DjGgfp`V)b+tiz$$`AhD zUder12A3vPjSw-x>Gt1zk445BrpHIf%zkhibM4O?!8ZK6>2d+qip&Yf{eP3*Yv6CY zFNtH=z(sn5@=P4(#&>Efi;w6^#PkxYInuJU;uj$ziB0PF=T*@2$S^@kmq)sEG(nJ>m^_T!7ft z9?3tcIkh?YbmX%?f7su22qeNhQ|WRbq(eJ+;hA&PogsT|^irOMxN142mxqjw)zsAt zT5WCJ^aHoVL3=HfPk}&HAk)7AdlT2`Tt4INflhYfHw_6<_nW7*SK9Xj=eu~X^9W=u z+{$`?%%Q(G&0E3;CK9ZODCuq^uF{gJcN{@kBv&pcP{s10#t))4NRAJ{=y?JYoFj-_ZvAWS<#rc z#jUPK_Z%ho{M{I-hrrud~SBkO$Yd;ko$x z$M4e-CMZN5moZc_J>7K>fwYT$T7JHjY^ix=72IBIC2f&NjidWBHFs3f^F=WZ(vQ#( zW;?>1OL#!mEX58gK1J66vOAKuqJL!SYLHRfDqS1a zP4KnlyEyDX?Fzrfz;V!Ki;Hss_r*q(nX?-JJ+sMe{Z;ly9EXlfpA!nJm3{fw8+SP- z7=7`W^zg!zvoALx7jsAdEJ{qgzLvO?t@g~c{O^aq<(#mhTX9xF7AXm<3wTU`$`hv1 zY8`|O1|_PLI{rO)Gn?5+xpn*2iVBRE!b=kJQO~U4DTGn@Azq6)ADMkj+xVZkwingY z2X(SS`nEj@d>F>8%}>>te*|K)$;qxK;=PSm=2^{YFjKU9_ZRnU^U2@{Q@cjgutZt3 zplFCO&1pZ;Qp;PYHO6-#fAI-+q?`e@vdMGec7p0Md^Y%y_}b3-l9mVZ%lw^nmiH~V zD7VLo7%qLnUI@nt->>Uuw1LQgtE0gM8sB7g8Rr`tQt;~?OJHZfu@?Ou>T-(mlKc(Q zsR0tiN6t+<%r&06_u)j=&V4_pb@&~)*LB*|%OJ`>QseEUqEa)Xn`m^6aI(?s)H$SP z{z5~Y+WWkbMa5UBlBhV{p37Od`2QB)ip`1)Xb+&zgs^O3yKm-VE%eQ}DMhFNHl{LoTZmbZA<6?(dV#>mw*bR?H=<9GWOJR2T zy>$B>Wj5NDWx3n`&CVS?m@|qEi*8`)$vFf$C~>~Fi6v=nF8r{>Jg%o!7vosND_>D#MH^MZd=_SCDM09 z%d?Xn-1`3VsXq{M)7vqMP|M4Euh^G-eSl=cSl(3yFGpL2uoXUc#;&;iNufyASBcj% zv66xeaXG;m?QCyc)dp^_o90Q>cJOBW73ZMoRFr_oTg^87aI2?&o-l(W^vM%N)f*ks zBJ}v@KHMaQG9+LRtyQB@JlKPn89jHB-ddEnq)p1h7QskyraW2iE|2>qhDk~2H(&{5 z2Fh6hggeK3ZQSev?|&?_Uv}<4_PVaSeD#5w>z7wh?3=iuPlBlJb_X?`{y8#e{=M}| zQ1T)v2w}z6LFUU9)?EHZFLj^JiSy-;0>)Uc6jrN{T3eM*s7O7p)<42#1}aXM2ocD? 
z$@}?)6pPh;ll|t6aXQUF70AwFOx{l48Td9lIJK7*=^jzB^l8$urEI2jrm~R>Q!l!1 z_27@2o!O={x`+gpvE?<2AbByFlL88q5lIQoDRdsUyU$Y8FyCt=!x11#1SxSN+fHNI zS34)9G#C77W$Dg%WCiqOmfx3PL};_tq0%pb+8Sz@gYWb@_ryEgD?{3|b=32FLt8F& zwBq2J_oPzJyWui>O^2~x6~r0P49r-V-+@+jaP%m~%(}2LPwSfGL=TieW8s&Nml%ZA zSI+zv@}T!TNMMl7;|QKoYRIXs;%gUXT0lXNKFnu1y}Ws6ON9AQoNyHaMoL5Z*YiF; z2EVst20vup_~MG`qyL77QBoQHlE~rLZ{!6EH18Qeh$kB{w(?_)`k_2$6P zBrFr^m$Dxm;1&7)%0HYnnux#Gu&@8X^S9cF^h?+O07KYnL-`1}tyMr;6AeFZ>T*wR z$2Z!r*@9g1-8YPzhcT>_pXm}5x7qAZ)X$ zD{h|uA594Ob0nntMO%0yWj(UNXtAGF6;rwpR+l*Hj%ucWnaJ?R)YJu1^5Nv(UG&beyh5vGVUj(eVxY{)8i; ztj)LuU7YXCMfqCNc-eW36UN>j1KfQS@vC;Q<#beg;Rf4sZjW=4?43eECq;qeXgy*w|cx|hmd zWn>8mv?)e#t>7P)5XX}x-{<`^Rkl!&lU zH#$1XNg_GD=MA8QStg_7ra?h3S|Ik!B{?Pr5edPjdl|`epe~Z!5!2cax{O2h3rI)b2?%NKU5_H z4qM)H@2SZmOwOm}02$huF02Y9SG_KIw_4Sgmn*_IcS z`pkXt@2~Ee64sD;lb`y+rc+OHE|}#IQ{MjsNYQw?>WNOs_prwNo8NaO?6r?okR1^0Aev-DDC7o83KaxLmmn`XsDt zXu&7O zwlr6Jj)#@9M#m=YAHB7;qm^~uiQe~~A{Rim*b($5WDs3tiZi2WcSMZts5uiT$~T|< z>IxH|x6rB0+F^MvEvoX<`S$~#kL`5VR+7`9@e&P19sD4ZYwyl8<^I4wxx?z;O)nlt zFtaaXa=OM;3En%qsv`^J{}VrdCO?gLFLV`GREr-#>NhHu_KFXHPCnE-Gn!HCFe}?OsZb|D@ zTw%TehMErBNe|k!xQamivT03|*E55@zhU;on;FdBiraL%8k70HCZVu1XxzyZ-s2wy zKdFlBJi2YS0QH(&rEpeD%{ARRs)-3zR4NL$Wk^YMp}3`mQrGxsVcGpT<(_&8({0T^ z=fqRV(?7LE@a_Djb@E&(v~^sTBp&35KJ(?sHx%v;7RIcj>s^8VU3iFP7s!ZOj>W2u zB%gYD30GgAZMCQ!v0o1D*WCBC=6nY-kMJul96do{>18UiSu+#*OiC8q3>PL!A2N{# zzQ+0cs>o8HXVEmhcBky}N35s!l^M&pgOhS*Ww)sox05p;XfGEDS`8@K^342w*>STfwu z@8-;~P%3#kYLU#(v4W3UAM2sx`sBEB<J?N$}EEELta<}4$0sR zgi5fb75B-mRm;r{``kuaEWVD1O$^Ya0qH${cjT09z!^iNVFO!f@;2E!=53=l7+Uzho7ih2+Q%TV&1*RS<&(z^xv~bFYohMt zwv19Yj(J-fO!{+=AL<2l?!>4Gl?AOaSx|d*tq}6YSdrDld?w0p&i;X4hM&4T1?m@J zkqZV49zq#g#P_{RoWxY<2{2I%B1_0Zl9FYz^GFf8%hAasO?GXF%f%r<4<=;{+|E_n zcAr_5mSx~e5mkG<0C|heOE0L7{20x^pSQV}=C6yH{Q59A=VrA2IVGB!r)_8?!|MBVzG)1F5Q z9}5@XhGm4lxL_L=7=|$xlxX#-80m)m2X~?sXKnjowB~mFZTRHd2>+&8MXCO}4#q|r zx5V8WMDJ<5T3GousF9%1=W2sd?lKG_e+npfjK3FK^Fev_fC34l56$pMT$o4Om#~GQ zZ_N=gAyxxGE|D~*jwWuZRCE>#4dap~DhZ;CHmC8bG8 
zJM*2BP`qwY0mtK}az7yJ_03o#*)^I{jFR81!uZy`H=a3(!E)Ih75L7PdF+;4f!Ao5 z@{?m3yN1^giGL&pkTtAyXhX2^NBfS!r*iTq8I)?|F8n~dQij0DNBi8q9apI}r0aa` zw$(7#4-J(*HqrJ^yW=IZ%X6>J?0Mz-CO$81@cVa;W30VuV7YxL{A%asq$HJc6xd$l=ps7`ys-fq3eF?G%fnm6Hja zuan`^>HW-7?c{$r&MvYG6A+GZ6_uf@=cqcTa(|TxTUr9q6#4_W`2!t7PSI^Aq>+5f z9QloXF#8g2$OQR?6wS#Q9{}N{QrT9}W=?El{^YAF#vGYAoVMCsu(?nMoeSPCi=w{{hE7YX_ib;l z%9U4gv?nX|OT7p={Isp^+FjeWc}-S18JK|NK-Z!0TuR<^q57ZZ$}Kiw^T8XVx+=rU zQsjmcaV&;I=p$N4bYxd=92=86h*Ng-SAEg(o#^Vd@fLUE!nKzbHjf;($I_a}4BVeK z!%|{~-}%<3w@7kbyYM9NI$6^$Wj#h8#!k2Yd@YB;M5I;}Lc0n{6Z0pV6blb3Kyg!j z%zF9u@78n;GMPudANWvQVN|N|c5m2S;=Qg57eD6w)^1X5w&nDN#UlQNxuGW#-@|cs zEeG%P@CJ}aNf)QPjHH8Lgo!2@U8Ux#D=HnK%JNV0pgL;7^4bmmf(X4?m?TE&Zv@rp zSxG*51UIJYHr#rc#kF)Ao?ok{>P~|9V);Cv%^&z#bI|TjoKaCM`4e5`hWZ;c5E9If z1C#1*np4VEzZ<6JJlfx<_WxzYN?{-jzvJ#EewuKtoomA~2a}FOD zgNlI6Xs4|oOq#p1fkeWQ8cUJ6ceMoauPXVpoKqz1)%(V3@m*}K;}yp8Y=v6 zesiB+97Qc>+m_dE8Sf!!)qPnc9U9FxPc(1Jc*%iNG}|IX*S{r2*-kJTi_KRTZpAxa z&7ogzXw5M-{IF~ji`Ya1AC~xI`zUYpnT6N31mh%}Ns$mLjzkwH`Znx|dzckvxlR)* z3FPgA;xnW(qWG|#rvo&EbTpYoWA$SLn0uYC_+eXC2#uu_Vo4ipnsH{$bk7@W`^~k> zk43}gnCMX`Ok|f|A2tfPd=S|18QMt@%=cy!<>z;(SGH|7SYP4k31=h1q0*1EQ#k;m zv7<>wNv)y3zNd?Yi)Iq=gHWDLZFHIIz((8~Pm_`h$+@4PpbE_^Pn>)vCIgeAePsD@ zyQX>~x641>Ot0Fw#&i~T`INlPYIO*o7XCf| zEA2$ou6d|U{0>3$UOUx6e${Hps3dnsg_JvgSlPzZ2r;lUiEDc zKGePqGTriYuGZZx)BV1{kALxatb5VP18@u8n-1pf{E{GU#)UC4Y}cAR7HRz9<{yiF zlYaaiis0?kuZ6A~%@}l2PkD12%JXfy>^Ek>olUJ5_VQrJu} zWh%2<_dqQVsx!a3{eQ{@DozMQTB5efa0>oa{Mvx6L**ECWYTM2P84H(DQ#);ttQ!! 
zUzDd+PBoEW4 z8x$`V6ovn83Ls&UZ6gWyaq4idSDx`n@7XK4Kj%D(K|Om%lhJrDDen6c_*oh;enk^m--HW zymKiGBIfn&FVQECmbeIGYe*oCuWPQj%|;tn@}?0erze-QXS3bGNdIVcQ+G$@{PXQsox4)TTd)e30FpXLoO?@U*tQOrVn(F z^Mdczr`UM=-u!(z?^j%3NfD67TU@%nb{vXVlMaMtmZf+2N1Bne5k_`e;v%Qt(18EM zYw|WPqhB9%eS;2^8=Tc8o!z=eQK#tHZbupSbMunMNS5iJsbhws_;fTsD%p25`hFGM za_#}t_k3*XPJ{&?@VSmZ%u&7-kd<|}KvDl_<%H3rJ4C+H!B?zywzsze=7|REed1`xTO~U*Zb+K4${BOZ>LDi>b7%PS>=-Gw z<%V0ozz%n%NVeloPnCxqNm~_#1a@STX_HY0h`6*cMIjQG2_9w+Ed_%Z|7YtcfpI*Io0tsBk;JU?c zk*~hbDvb(5hNs zpBKfFViG`YjZ2NsF5_jLgHQD^mhGuaaz$v(V8fyxHn!{TQr0>QFsnG#Mt#N=M`IF9 zWHC6#b9T_UV~BI*ikn+1m8275AvgEm@(tXe-W$O3bS=E@~omQU^1PT|}YUuTmN9Vs+}@ZW7l8d__Bx5}?7 zx^LgJt>^uzfgi-|qdnIXPI?u9bgQR85 zi`BFZ(ZS-2tJKtTI#{))YQo@h!yOOux}Pv)7jZncB*`H($?m!0=`Qf>e*{!+hP)*} zZ4goA`cQKGbRsKu6%DoHw!RJ1K6b%))h>9vy}v%z?rXw^HcR)#tDEY_XWKV#nOER; zCP)S7b+e;F!+|FnNSVSsA z>hXCjTo3MN`C(p6;Y>WfS4tMS)a@%(6bJqhr6^-!0$_rlc(X9x->Mz^qi455%gIVf zb1LI;9fTjL!KP_@J?%(y+IBZr3ZIu@Q~5{T$yzqjZrhAWNIA?ZzU? zf2|aS@<7;|+)?&>vO|UjbT}SZt3mI?F?0RCjAHdU30>4qd|Y(1k#!ua@fveChKWGBbgm76$?WP8u#ZIbb4o2*Zv$ja{qh*X4)BlygqI)VJ5Q<{T&* zM+GFj)0|&Z{`@BfBgZp|J*J!G$7*xK0K=B+w)PI&UpmRPjQii+l1-q)3GlV#JG1Ok zd8S=Fv3+N#_O$+^T@iwJ$3y-^G6P?;?^euSpH(d?%GrSp=CqrRwA#P+S%&tH1dcR# zJrG19dg!L&7&*#Y3(}Y+{NfsySy{l0v})*_@c<+EnX0*V;Cb-b5m*1gvO{*JKo<95(a zaXzyf?!-S#PgBk0eDlDnj|DzGxzD(B?uOrEclVyaG4FE*GExsySIhwD`P?$7g-7x>dm8 zo;H4cY|F~=53q+^8tBgfYVfl^Bl<4yplcevKQ#ugx-`Q{D%LK7;CCk(W6*}U>wH^F ztIIz>X4A@5$g{VVzdbHutbt|wxkWrQx0D<>hrc_m_SV!g{ot^L>26Hn%fy_TKYAW- zzq4uTmzD0FXK@~iZEv==o@J?Q_B(c^n`!t*Byw`B^?xS3Cb*sp$jz&AmA1N#)QDq9 ze5=eDgq+oKK)>BL!)l;%luqc@?q+=DP^y)`{cLH{EsO+bRR8QqjdfEi)p1Id~<2jJz zSFboaI?aGO@?kkB$r8F>Wkr)GLO-B3P&$l%h#t2v^fBx8jL5LK{R~!n1G_f{Brwnh zez{!hzSi>Kk9^66gVNXh?D5eE_$iAQeRhRWxA)up2_nGF2TyqEytTHbApt4tbo*0p zlxI%V-0GIkxq8`JR^AbN7D7PK>c#GW+Y(=o^pMF(|FG0g>|a_a>Nl#%YXI8LQ^ti{ z;qS``7Dh;(%v-ral~BJhLVT^~a0c%CjHh&jAiD^VUYpfac^|**t8kPYtd6vJr^m3P zvL^ZO$b-~0-ZZY$_T?!fyYn)nZxV~jz%QYd#E-l3noDXEEb7ti8Jwb+72Nhe61Owk 
z6=F-p3%F%z_EiHpCkZWyr-wh~E=|_j3FVc{AiZoSjSnpQWkwg%XDD&nJ+!>JA*fn7 z1=aKhHdJ1#d|=rcG8fp#qk!y8I}?(|bC4^xsY1*fyM} zB8|aKA1haSE94Fh3G^px*vtrx#l}R}+LNU*Tb2fp2O*~&$>db;D;Xba0of?z!e@p` zT4R3R8^`wC#^&gisH~ ztT~ZDmLLrO0`_11lzT9m9f8gR;l`r_m6mxg&h*;j*iuaBXvJSyp;w8Z5bz= zrb{YlB3V3Q+a4I~Jm=KDa|!A=4HcWWxq@&!6>J^(eY zNSVcFh|*?y@*aahoCkgOhb7JokHr(*TNPuUl6}Q9;iDx-+V0`{cAU|qUq&5V>b1FT z4|mHsed2f6G+>BBCGX3Z=idf`vwvYP@~@%q%`&`z)qQ@bOMJKcpXUx2$BCn)Aw{KY zc+ZCNoIG_eKjCUL_@64+a$(hELTft`_Fw%ZOmbX+1T%`&2qKxY>T{yZbeaR!Ek&Zc zCvuM@#0j@HyIgu7!rf&_u*f;zOrBKgqu!IKHji7z)&IL`5Ox@c@p_$`E;ZbvAL|3t>%6{tc&)l}uVZ@8t5y8x1Wq6J9p=3{z0^cp+TC{g1w+i0`q@lvLfqf5*`&5rUGCXc?I@8B(VN z;urRg>ioMY7l)*`#Yl_Eg=Julw9ZzRmq=z)h8b(cA>J^VdyisIZ_gjC^k z=+C*OJ~gLmJvG#E=!@*y(^Ww8kkdTJ>PE}&V{!~+xDq6tF{o({`zj6QO({muZ zE^T4Q^ToJFkm$35ppE~w-mCmf-Yl~5y=$Q;vd-{RBqk<7=(5C<>7{4NmZCfy)K}XH z^8rK4q4WHw%bi1&7hFWb_PWq;((AYx5~ZjWLgL_K$a*3)Ub~vAhN(%&%htFz=Cd~CF{+>-G+Z9u7Bs-4|9DCDsm83niBL(I9WMsXijV!?3U4N>J z)?mK=QlbQqtWPG?k%6PEPS5LcW{#no53^viC}!d&Yf(=$R*RQxh}-c^ep%v!q*GA6 z?Wv8t{evt)^_-b#weF5N-bkdy zE2=0svrXb0yBZ^Ifi>NxZuu5tAQ&x6eGd4I#K-Y8hc_~$)O3zB39wTkhgNzYc%i@v z^sy*_c$bv)r_9YWXH|8m zPbx>LXP<~4>c#|RoBAnm=lh}QG_;M8e>iwGAp+@6yUW7!sQDfhB8)v#BaL*nC-$=+ zlr0TCQWmeX$1+xb`_gcP1m}1>Pg4$I`^kgGu$gF!87@K%{ucFzvPt}Ja@_&m_56Gd^5rK%n$*i~i7hXEv z$kZIR_l9WWZ?-~vUeQP=%TrL~OY=(T0^D9|5Po<3-cS;{t34v4)$+p*B?7m_4!2-h zo|CYeA{LSdi=%uqfMk-faS%2S9r{7Ip*Dbd9fDTemTu^5HXD*NsTXFkrc9IFj2$kF zzR*x`g@yI)h~vXeR<}F=qOsu0#ENh%x_9b_E_t96nPHH$y$|w>x^xuuQ~*aMAt!36 zqCME74z~ii;AB6zq)1#l`is26h7Iy$2xZu3$rlwRt=25}m&4}2FnKfgCEX&i7WNg& zxl*X~_lbD@yYI>O;C{2FyW?S(wQw2H2~~#&jKh)N23K`<7FPxrcsJ-?HvnU|bD>Qv6Bm3*ynt`S8Cr{<6y}p|4B1132K8Hy#%3M%42XTGP z5PGGFD%&bKfd&`uo>nxE$#Z;m(^v!t{%|3TLyEt6aT<&E<-Z>sY?q1Ad_fo1a_!dY zJx|@b-mve;c^KX5o!q|M{+k2`zo_y(EdipQnWPGdlRmx^7uXXY%+2DgrI1FiO*EW= zRJ*Nvp~w97XvKv;>JX(*=dNCDp5%mh_?O0s8j=ij^L`Q+cWbMA=4t`-`uaL@!7bz8 zO<&Q#x?hfO#D$1&u{wjFDx>M(Yt8!7TO z0ScNob9>br)o^c^An{wukd3Mbx0kEuF?P^2XH|yaS&)kqoLE1-6FVS#R3_N(>KuY2 zBVWe{c<&tm9e_?gsNj 
zk*muSBeO<*8Ooxo)kL!clq$z63)Su?PO7AZ4}4JG#8Y?+GT+>Sy%DWr8-!JWu)nCj z4X_qb3s2T39pV(7jP;iHw-ySzC{#ofl)&{-@8mfCKGCQ5DtXjWdD&`yUAOHxxGWcq ze06-4%vI08Pvn>=BcH9W>lT~&=>Ef7x-g&eS@)?>XAObV0^LRJ+hUJEaaDVC9_5ZF zbtBUxdx8T>6dnBcTeM?jXJxl-p`#c|-Pf5_NpULWpM$$&mgr=SCFtDo(H0*w_l!dc zckgHYkV1#~A{1{z2mJm~m9-;Pc(hCGp|%hwc;)T$iY2*ZDGP^wMyB)KJ!1;)tyX6` z3#MX!)Oc_Q1);$bD7{{4vGAC0YZdrwyoXHkdlPRXumn;x@8+YiQ}k&a{H2}uVwM<7 z6HUc-0WjjNLJ6J0y3nk(hSV?Y2w&0mvOgDT3 z{=V-)Mgz(Da)FfN#jw~BdZnqU8OStypI61uoF|1AB8q$uQdD0k$`q4hp2efD<;%@^ zF4NCn#QSfG6n&C6Q}5FiSr<3`#Ln8zM;H5|_ni;7){Yj^VW2e%!)8ERoq45yg)BU6 zSI^6`DhF1mX3~$q89k6>5a{8IdU|%Q)WAQ|H9|pF^IV;BmCNo&nzJ*cX+O+tImf{8 zZ2+Y72WfiJ*o~Bw(Jv%ZegjEj@#P-(*Vc&lWc3(0#LPOQR(?nW#A09lmd_f2&bvL2MpzGDd?tgrHfmOa7u-Q-kU0T}<`v zNkovB1AavXoR&+LZr+5(P_c+Mw_+2cCe=(Z|Kc5|V{HXfgd4cPXzr+!N;fyW?(l$oeQ-*D4;1eu%6 zw7R^1+obl3@v0m8YiPwj=}z4k$KI0@^y4(Qt{-{OFYa zM!ZxC^!L%U@FGZ?S1-Y2ks$P4>8)1+2NdVysLfl`)x&=$(@NTE2ask^0_W%6if{I^ znZ9-#5hyYuuf5OkN{01SD9LT5{K4AqzZHI3i;|~HI8Z`$qE+IZf-icl4U|I81TAQ9 z*(C%jU?n4u;`|l$E%;HQO0qzKXahEt?K|YVOV(_S(Vl4mIL z_1ajZt@MwA=Q7vuHon4|-flDR_T4Q_Zdf)Mf6d+V(8&!#@F!);t?k$|f7>t?>`eLH zVz(`SeyrI!QoAhZ)?QuUPLTMZl#dTDre-$8NB3Kp&`(7W zKJA=t4JHG^z#LjT*`cj&Sa0oYPb<tH1fRwV+`D&s6EcD7oZ>JWrz^Y#;n(DGOt}AtVzl@H@K7 z#vICfROFy_qqj#w`Vx^e;w;})gr0FQEmn~LNR>*}s|gh6mHJFO?3fC+V=bT3;Feg^ z!jGeyuQ%<~>sxZ}V%pVZKTofJo3Pm1UtS^zdu77QjOzfsHKhxkAK zgK6BS>WLbwueEZWh>v*_8{6#Km`%zpMxNwvOAQUE7%bL8i~du|B0q^)ol>9T5rPz@ zp@?VwwG)Fat{q&ciN?SsS8%4}b$G0>LYRG2aJH?`@xH{)o?4C=K#~sy)kjmeIFe2+ zOktJ4!9e(irVjU9xhhp8d%4?)BHf_CXsINDkTFzxwHR5Ua8tZV7gC>pyW<)1rk{NV zg3&n)YQ;kJ6iVc5v^QnLENeSNEMjSjhTF zilrmdu<4WKH^+0_-Ng!45ut$ap+(w7i&C*-Z${bjv(FqF*O-8z+U^^$klm zK$VBSIh}(^?f$}UaZK>ofEwgZ$>-FU8{V?72|{hrZ>G+ew;{lM`WqCLAtlbCvszus zx77D5`?h~(f(rdYPuZ6`fWf4H8)Jc5)cYKPN4+|NjEdROjj#b^6i{uELBG}$>x$kc zcP=AKj}%_hj=X7uLH>|+J!)*L#G7T);s4uG7fj2lKbP##HZ+MsShP38Uf#Cbn{73c z7MR+}=!=|LS4)+S?hz99UG)a^kBlj)KzDt(&P= zRZVVb$RiGjAI#Bz;BQu69(sQV7KT=E)KWk4n$RRa+=}70ROuJRLpgwC|4NT{>y^Uo 
zPUf!dOQ=S&7JYzz7fX18J4^t`>pi*U9Bu-~#4u9M0w<#q1HS8Xmg7&8QGN;nQ_g{n zN9oW-Gx}!zT-|j#C>#4nT=snwN#eUINVQ7D*ISt`N zhezSxUx$1IC9MxVbR#q-01N1nG!jBt7-4~xH`onlK*vE4&?0>F9WSc+*)A8ju2bfs zM9WtBgSmTe1DsX)bMwh(Vf9=mJK(2)cS6$P3aeMib6klQs~+QOaBEIrorg$_><~fd zkBvKkGuhury00~a&Q8`z-m^6Nx~+uL!hP}0Q~2c&Haq>3flTXiDtZdE^2Suvmw=Q9 zTaqZ8z`m5W1XbJbv5Nr9;~sTwHt!@jqF9V)8mp6&NOqY$?!I~3IdAZRNz=%kB~&Kv zs6bKl+EocT19(yw%{%)eo-_dLqbs9+5YrX!&IhMM9eEM%CFKODq$-5~Y4k@dnhW#2lPPcgTq@hzoE$~z?$l(dBo&XyRxn-|tt4|q9Ge#>7m$JV81 z#0O_|UL5T(^?exgvJisTy)o0viACCx$c8%xq?a_I!@qbci4P&U;gxIq8P1jF-5dMWY zgGKaHJgrp3bBYv(!U6p`X}tU(vwEMyF8f_Dl7CH8SVYo{daC2J3<%&}mVTVbO#T3c zbtL=T{~*}vt=TQamU^avp`y2|p#ltJB7vCsKf)wggpzKRR!DVtNG7erbkq29EI|!Y z24qS#({Pi2H+`v&uxPv3x00fxBDz}J8wTm-o6whsSYjpBlmC*u)RFlf2_9qBIlw8T zc(bkPTZjyQyzS`%b;1Ioa2GKy)Yl7RQ+le3M}3N+E!pl)ndQyX8^HrVE*n-pNPe}W zS+-u^!KOUhAfMtXhBebda7Y1XH3!ig&z#GGm%E#*m$N?w;gCFb75>vpi>#c}sH~dyC znC>kngyWoS=4J(~&KX5UB*EtaMf?)_MFJtFpLXns+jl#fR}4Zekf%08YKj=MQ zL1Tzt22W=;q9uKKYB*=|0P-x0{$_W`L!m=UlG>PSig`g_u%N++#GbXjo5Ymw1oRXs zhb5CwBpYzi8>Bhj(xmb29ns!384CB_Fk`9oY%(R=_;j}^{J(}L|46mAl}&q{ev~VY zq5QAJ_%|VXi}7Y|dwla)7-qWa+CExBK*#X$@^|b>uaNQ8T85v-O9L#aL9cx2BgJ%J zYF85S6Ik$1o10PLGn_G8J#PG}$HOy|Xmx(NJgJv4i5PdbPDWY#F6CpoZc0VhRYIm3 zF!~-~LkeTUH&dmN6uqTveAsjbHkaN5wO@fQ&Nbj@@7RUWhxcr|{@W7oDT2qq6h&-u z|B1Z*g!E&Aq(apK^>fhI7)Rl*{o{H@px{=UEw-h=o6?L9gOY`f4_|=5?&Zlw)wPP~ z59KKyL2`#!qYbV2io0ot8VocOdWY~dD84HQa3sEpl3K*m#7?T&JD_~3ctsYm0Q73kxDn_EDC;=Z9iEF_KfdRLaqMi8a^r(64d#{A>JhVY z-#+sBio;L0-I%xT|nW$>0O`iC)lud6QV% ztMa8~<+9z^^SJ2n4v(_l(8KfiAiTxNpO+5NBBcCl-|@L#OgO%DG%I@ zcm9ikeoNZk!>!rWXkYC27`W49RBWmL z;0axkwmX7j71;iq%3~nh^h?`h4BE@Lo{;l|YbA{Tebg`ZWBzqB!-pNd;z&FW@roL~ z{?%^M%TsstY`Y!*Ea=lNFw2nlT$CB>EexNc=(-@|D`F}e3u4CUAvBQFi*N$w zKO?)J>$>MkWhcI-%nxNnsXK5xVqS$_+1+C|u-iRFRrGQd`wgQgQW=Up8ne^HDX$Ty z1JzCFVJ4g{t$3?g6fXjpx$T_SXzpt2Qzw}VlqJsQm@7B9`%ZT`!m`(LI_r8^-mk_wo z^H+GWsH_fn)mGTCBlEP=u*hAH^+#!m`)bmsuBC`3-G5(5M*yCUU{^PBG 
zf6|4#@yAq6VC9HO(y0I*IxfqUg<6kq`|!+fGi#G=)+XyjE-70l7hFwz(Pkvj<1X$0~~1$K3CL>9NnTO-&1LXHl5{gdUuFjeN(@osi` zammsX+*|a?g}K-mZo=a4VK)Kj{L*!)@}--YM6_=xgb7e0$rA2ljdboZ5#2I`y74mz zDS|(XMYrY3&W>mo2@RXSR~;TzZ?wIupVtC88}um88cW{Pi5wS?Xam3z?ZvAIeeQno zD5cf#dK``^3m3qTWkNpF8K=70w%=*hgIt8aG0rpfrate?z)Z-u8j%jC$Hr5nY;w08efai{k2rb__u4l+gf<( z&68>VL;Ib(8lyk*Cj-6R@AfY6Q5k4cA;MasjhM+WRNTCc*&@N7Z(sxbhsG)h`e?rC z@~h9x5~cuvt%CFK(te48yVZHfM?7>A^qWhb9&S%G zKOf`QJt*L@e1#brdIodA`cUwb;76mfj#gDf=UfKV(5y7fjQp%y&p`)80+^eO^m@MUD0Pd9! z@{6;*gDyl_8jW^CSd`_p{aOi9-d#EOcrE8BF3*!}B9+S}&NlRYwmE{ORVkpxebN8% z$Jsv#-Y2w|c72$aOA5gF<*1wQU_c&T$ySP&__C#f%b$wo`t=uive2c;nyIm!uFnet z_f+)F-sn}9+kCjpAOBNvy`u0Voh+7S|LiQ0u(k|!mf>h*^IUEvIlf%$mOw(;uj$$0l#(4bgc!Lt;QY(hx&QS#r1x(EmP z;ZfJ~%;m5gEtSv)@d$yK%kP0wV|+%%&U*`hDf5TxAC+zyJ|Sq&FT+<%j5QM)_FT%h zJ-iPc?PM_D15b{tt1m1k9eibGx_|$JuO;q5GxU%AC-V229F15O>|^47t%4I)$dVho zvBMzE0Cvq>S(xGQ@WkYJ^pkrJRMx(_Hj=I8jA;ueNsdm0q5j&$sOKhZswGNhIqR(t zAnoyE)_o;r=LB0^MGynz?HCccSzyCj7tQXo+q4i!9V##f0;{rbT^PwffeiUpUBuDk zDrCHEmK!zmnjmiPhsn*^zu!5S=MyZwY3i2Xx#wzztM{^nd%mZe-1{PV!qN*Bl;?6X zPHi2C?cKY5t@tWwTbs{?nYSq?+YCJ9hnaPDYx|GnDsAHb&icGMH|Gd0bWc*tuRXxf zhUUxfPb6C}@$y;@uI%bFXU^nH!%*nuGg)iKvK}zeXF`&H0}p@nw)EqoCz1A}qvVlQ znmXRW?zJjfh)HkOa(gCQub=reLhs$>m2F%UdS?P0+%ln^=p97-Ca~#dj0-KZoVUTV zza|7;3Fh;|qRj?RqvYgH(^2JJeRIZS)z@cD4rWbGnok;`Z?@MS)B4U~>`8w}9}Z`9 z8JeAkZDN8!jzc4)t+ekoU+Mo|>UONh{Nl-|jZaO$IlkP+IOVnwE_^bMcm)5c{2OEy z**fK23fR_JUoaBj=BA~gqVC9gkV0|}Mx1`F!?S{k=uvPzD&MWQS!(_^M!VRbUJ38@ z%07CoinoQN!O~t~siyDVx8NLd?-tkQr6FXpr82#ykK+G+;M203Ja>|OEikBRuC}4) z#J1bX4|aE|Sx#iJ<%%Wsg|Ec5!H4}Bn#I4znI?$z=QP;Qf&E-1!ccw;ih$m`rarXRFU zio5|f+Yk-423dXL7DnC7cxOa9|9*Su&D+1y&}7WdS?o8GrSNLqUqXjR(&Qw;=m%eP zX=x>C4?|%9xazB)LH>2(>G1f7Oed{J^3r78@e=pMlaAF$@F{kHS5e01W9y6?`!l>5 zhZsIkg{hzrd;>L$u3GThhWv!)1ADdLH^GH3IRgY8>lf(j6#}sL=`4?qhOkN1qkffm z)=4_8V(}OOlcJ1+$jDdBTZG$m)v}!0D3tpNnI6+hBo(>=HC6f(GbtjXR~SefUyKdpOyc(;DdqLP`V+^PH*$iDYFB<3(5@mY|d~#!}KD~t0nwI&iS(c?V~fMEal1Z 
zQzxje2>Mg7*L8y&Aky24!UK9uWb0Pze(Ch(r@ZM1Brj2A4XT;1u|xeOooF7L3f&y_ zlPzuSh=@TtXgb;CI;>aj9nKh;4`N`yGA3kwTXIh*f@Gn8NuM-TKIOC1T|A@wL{^=6 zA$CuM1YUNjKImjI7p(*9(RtksS;9T4Y)-bB3qGgPIy!{U6pgxlBkW!6Iq-rgfwdD{;W6Fp#@ZrWL+bFLl>v>)Q^1F;q#>C9(!W7(svc6dosqs(Hy@Xmv;*a}^^+$&iVef`pOYr5jfq>wXB8;9vuRJ=4;8zr-_=%)`Cq zy>vzekqK98kJIr=-?qR46aAD8;OVqNPyg&S(>?|9wlXKxKEU@WAS5*8WqdK0DjPjt z($4ein1D#6k$1}~yw&-_ZKQ6^OR1S)20%9DUDw=W1aV(VMz%i`Nx+aFgAfhb^*H1l z_tT$?4-zqfEKQvMy>N~W#>Pf8cwsD?Qgky78SwPCd!nl4d6s|y#DO|}IFlyF3h(^$ zwP5+`Ll%k$!f~M$22%?=Dk}?w|Cuo9u$!vJ; z>0D{f2>|95l1~;_jS^#`ef3B`*sWdo&r)b7maT($96{RH`cdJ;`TaSrFw@NyI}BV4 z##{Ea*zNE`1F+~dma+CW(qcKAjs<_Cr+|5~CL6<&C%gf|Uo>+$;3xjrvJAP>VellD zEu#zGe7^`b{(EkYKx&9Q2%|gliDl^@7;JPqIt|@g^nApt6;->WjAVkY5xqm1=klj| z>oKKY1@>oiI|;9(c0=D5$noYoPAKftDTyk_W!})xLQ(3^j2lg7lSynx~w%q@yUuK_juTS?Q5xo}ziX2N7 zO7Ox{fZAsDHi1yBGEvq7Kae^(%v>27&QmJ>U6Hj;gtdCY16`Qrdw~lM!?t5SbuBiZUqJ zF!Cyq<*S6X#e!s%r98AbJ;Y-GpF9X)S#J*ucU|2oXt##2&Xu!0iU2F`KPvaX;JNij|BsY6)<#{DMsme`4!y6g z{lY>+=4Gh6`bk#eV)QcDD^5!4&rLfX5N_tApMrH!)*^O&1p3jfTzb{wt+ENZh>Qus z1T0%dRUR&v=?3{Yyc#y_Yq;w+D<#o1Ts|w%x6^6BF747WPcPon;}h3?10)foNXgXh za^=Os%lYReA@F zalRqCo=&0gR1i~F@Uh1}Qv&VnLvkP{~cPA zU~XxIP2+}&2FbPQ1|a15mnR*wN|7KL%@jCnaO9I)iG21DCBT*w23DecX}5P!5D$o0 zVaTIrENkBA=xL)?eSMM@>_t}n_V0?MZ7oS}BFau>o)B)yRHyg^M^x0b?vQ-eMTIgd z&H*Jkou}Y*7wjI|PP`R`;Hm6VH->?K#Pv&WnqK|ttPo3o$CF0O=MqyZT)QY*6$Rbb zf0T!upYCco_l2E;7nY{?(1g`{$pA44+o ztTRF%{#(A+9&u#GgxSj6`jAJq)I&3;F(5~dj@tyti^zf^k(tI|CN4jkG%!Cq+f2Bv z#7jesAB43A?R?VtJuEX{kz{igjtUA%en6$^_{p^z33kmq%`L}Cy^{X^{IQT|Tkoxg zgwj5u=63IC=H~2~K_FK!mFA6kd%kdqacsO(9|ty~30&%b3Tv=yoplEiPkBBVf~m_c zye9-eM*iMcARPy(vuJfSsmfB|;oqY|f1t8(?If?^If6~v(KK_Yh!MD;_+rgWRGz-} z=0+d+R!;?vIy2;M7)m0$Ohd029&nL3=;0vdpN6%{ecx>n2qZ#jKBoG8Vn|%)0}=|d zU5N5z=tc(YYNr)r-`AieWtx;l^g;=^)Y+^t&<5*5kPFwY;P23xiQ&dN>bz}l|WEW3W`i>y=4IL#vVc48OT z)Zsy+)21}Tu@dcLW%eoVpnkSQJ67wxo{N9z@So%^((krU)*tAuk2wdA$Z3GRbPGI4 zp6P$TK*bd9NsyOC4eXZ4SvYVlyFQC^sr|m2B|dTee@)|q{)+pM9%Xm3N`;%+KB0cX 
z+1}=EtI|3Qu72-7cs%!^_vzHS!_R!*ty)D@`<`4M%B=8;j;se4BXW=cJ5d?=YOL&( zK=;+zcg5$0n1gDANV$JJP*3ly!`krD70S1@PVRzuviYc+T^}qrD8aqL4t|9fd%sd({;Yu z|GfZ{$GVRb{<{MH{gvX{C8g9i(77bWj7PAxI_Q6L$%)I~W|eFjmjcrmXJUuVdl-VhjPQyB*lZC z@DhRdM$C{&c+_0zgLa+Q2i62;xgxuKWH@}jkgi9WaJNNQ=7BrsFL}E6kPW^p8WM!i zsBIM&DL%pL*fEjg259vg2mf91ep(H-LpY((R0-MHIz+&X>R9lLvvm`$^(yOH(mgyW z$N$Zn84N3|$ZOMCDUVuG{z~X4Fgd^0sV(Gxp+(`9e*|q0*mk43*WCCoEBISCV#hi- zVo4jB0yxJlYwz8ji|T>tnQVaQy$p(Mz66zzrv&u7b36?W7wQ@@i&_^w21vVpsG5c#E&-)W}5ujiLxOxs9qE5Rvpm(^W#~rHV5;Qz6}*7 zHeCsN)FjXcgF)BD05boFs%+IGN<76$&0#IfII3ccZ2xCfN7e5DP83i$$CZVR*PIv^ z`HJF9q$*Sh`PK)qNHzRP3i2=Uhu=7P!0?Dh1vjp}ITn9AxykXfPAw{K@U*x;w^h9NuDF84TN z^>#Svx6IaQLbgiP@RIuX0!MQFO>Sd!*hJ$2)sA^0OlPsk*cVNHVF#D|x&CAI6~A|O z)mPV4yjO%()aa2A`y~C;{Aeua3{0~{j7jKu5f&|5w$)Xp>tP1z-2lQ#S)vZoS4!D` z_1$Srvw7Lj-Iueh^$WgWUDiZ+CzS3rNjHdb^v`X!HN_J*hUrO>-lnh>_3{EGF%*Y0 zFK6LD{va+rU_V8#>V%AM5<6o~w*48Bw%z#KzVA(!g)jb?!e6(}*N?G&Uf)}Zi#GS| zqJA+8wYt!b^94jbh|XxAhij0Rr7bv{?89NtgZ5j#KX~{znJ_UVOGL_IH#Jy(FEk%| z{JbylHEG)qg*=o=p^E0sRng}fh+50U1E^!RDfdV})Cu)OeL9n1EWA~c<2cC=j^sYe z3P~hcgWHZEn*HpP5}gRFTbT8ay*c1)hb}-fw6B}9Mh^ND;kQ6W{UH# z1*QxS!P?dOjhNCQSQ5m++eE?FtFn)dF|+mO=yI3?7uB(&$BYxKZ_$H5sp;37Vq=-K znyL4|uAaV18fBj{aZGd(z_f)-2pc0_USeLx&E;SBI{LgPUnfje8t&@c?M7I-Q|9C} zJ(D5YZ)r={oS`^K^%MQ|e(N*zec1egb7Tm%d(6NdmEReS?$b(&SyOOnpu=3n8G?9& z(_D0rKJ6C?pQ-nfn$I2lE51GMh6*nycvA$ddXRBf*1#GxE)56&xbNaTOCEVAg0nro zrMu~T?snV1`?_k}g6_O!O^ItgYzk}kU%TUr>1V=NnJf&gWw$%Vt}Gd5|assjSyEUNqCiH4@1ty|vQKK;n+c6D^=EC2M0<-c+$G4Wo4A& zPGRGx*|}9KXYV^hBvct&ZXBoFGgg;oE6&45)lM&A@yxMKbGd_%y}6C`d7NGvoNZS) zHx4ZvK&9tClYUklg+A+{uYmVV>ete6pxbsU`o66n?lmszod+*Yyr#fV;|#0dTF+(c zWYR8$S))lfwh{Oix3!N+Ugh9Y0FMpuoE&oLe|lfQIZ~%7@Y?M{;Ai+kjYlJ5Oh$3T zF2}nFI|4G|QpXEjFlPuXitOSxtX{vZ#LANT(`|fpy6_KRwXMtiT!tr~qgkP+=r_4T z6$A_JUe7O8UZS-RzLwrJQakhsL}q3HfzyRpIkT?-1b3ItM{}l!@d!RMXA3lLf9*?k z9QsiNMJu^YD5d6Kwe(_d8T_KUaKLDPKLze7&L(`!rVrdo4wVq4PJRV38g;^-x#>!0 zTl9Dm`baZXwMvXP2GCq^B(?q8f*r*y4%&MQ!5`vto-OUFCwzpPmSG#|8{zYy0ns>C 
zQd#|8>0ZMsr!G;K&?VE`(E8`$OQ;a+2|^FDgcpwn*|-5wwg)+0M}oot4O)}_9^a5% zUxs}s+Y`%5of2Bfe}3a$iU`U2C-W-xV&RL>9ccgtL$b zF5^9}Zg6{SGc;!OrxHo)$}I-+-jh#Ibf#1?f9Y1C=3MQVAbVG9)j$YNl6kBC2oE`G ziA;^gB-o%!!m=cl`IPAKL;v8_d7nsw23wf_rS$@+ZcY#aIi;kIlLeb2r0mRt1J+J)=x3;T)YsD>=LU z?6MMF2pyZaSc$Z4E1hAfU2tpU23N(L_fWVsws5BEHVf&UO4a?IzPwARvR>kYG^kKWEQO)GQCPEEde@jXo%WOqdqZ|1 z^8#rYQ}ez4T5bDWe?G2OZUCprUX6ul=AKL2MpV*KZGSBG{aGcRCyElj2(K|y#}xhO zpFmd9Zy0x?)0*%aA0a9HII8s9nd!2i{uJGZjrjU$d7dF=2n1N&7iZ)xK1a^qJ)c!_ zdhp*BbI(YIV$V8bs8xkDZE-lz9&%xfwN{K(8sOIayQ0=Xp#P=%N^#ecHsFrZ@y3`+ zxg545C*j*hquq^XuEi@Vs}v3_iZA#ywS-{Tcs8oiHR)gx7)cmKcsf$V&c&}^h;PcM zZ+T;v9f-*OMN&76kS@r5ZAIT9dH#Y&NT~to{_Wk9~rBzf* z0gFSWZ>1s*cko?1rdf^SzOx^Iq9wmw@cZyV8g^!GI44?Sx6y^-n(OgmzamB#lA^k4 zN_9r&ZkOY`IsO^`llrjt>Jz*r0l@#nDh7#OX%ez|1(XdV;dP=&*4?%}LUon!Kd@AQ+mk{_Ip@LZo3OqcxBVQ(pMuX8o@)ze!4u+d+9 zYS))5!Btt#CJpf1#(-JS|IX1OYz#OZXz|c)kJMw{Q6ENmkFOi?3|K{+J&nA_7huz| z8wol9&QlNb@%?zrue9nvDd}b8l$PsOfj|b?;pD~OKzaFj%#-~1_nI>sh!xl&e^>ZO zSq04e%=G@7tCR8^V|$O>M%@sD`fls@)_cnPk8+E_uun_+Gmuz=RSKzthzRza_qqti z6F{jUmJMPWb5q~d%J0@xIgxBMEH}hBYWPne%4ihKAqCRlZQ~>1P_B7_AGRs zZh9Gj9k6P_Jsk_RHBJHfncn`(wa|BqP6m@)ifp+^A2S1mi?+B`(-PNZ^!4xJ{6whf z1FtAIaXq$Z1rPa@`ond>sCGR1<(5hR_Z5$}KM{gw`I6aAdeFaaF_QX;x`ywOKEn~7 zW1a6=Iy;Wi^Q21OSFBCUw)sV_m(kB-@I2504&DpxPI|THOtDbo*9pS?8=dUV_WijP)E#+uw1q4(RPS{uizl zTvcGueC6WYVEO~)zN_gTci;J1xNWH}XN_tf9=1QBvua6)mmsy^UQ?0pnW43$$tY8m zM&q_HRjaNY8yd<27o?)#+tA61Ld3bsg^59Se7UjI>AES`ZM)Eu<*Rd{#Ckd(I2v({`Hrv^2PaQ|R53uk^{$~utimN; z-WUn?dLakYb#;a)vK3lzyIk$QVL9g!wBb&_`TS#gfWrAzt}y(K6b;t~+f$2tlcm!R z0m`FNM~MOTF&XhT{n0I3-_emKu#hy2)Q+}kZ1V|Dw#0laILTfgi_-u}u2%w) z%ckQh3#TqHkH%?h5=ZWW8;^r_arE!0-wF-p?jM5@? 
zQ9|B$zTG5dDZR2b%4~*y2{}dRa(FDbDs70`5?;d&Y7|!T0L`-SXk>SE(G^=^a4^0a znJ#O=wT5BU%+fWG`Vb0LGGoc$CTC4H0CSF6ZS)5zak=3KLL}xhKNM|;H>k`;95n$D zcQsx0hx&;YyDSOy<5$b^Tz%Ja)&&dyy~7oKY(9@wl%Yz)jPIWMSds72OI^NJxm<&B zZm67#u?iitx`~jRVcThfA^F`R`rqG!w3{cJ1p**ja7I&d@ty@SDAu>@xEt~EMA8cF z7W66x^s~dQ+ZL&_OMU7J`iGcVg#I6>PaB{z?wGhc*w8#wu0YI@w`5*-vhhl(qVZ(S z@I{VzR=Q+c$+FMh@N2?lha)Tj$QHr0yUhIEwem$)@fmy#@|1)rWxt(fd)Ozf4kZ1$ zBW#79ni`vwjgM?5;0bJUIX=A+@oU?)*TX1 zxejWIc}YRpdYXdW8w3Z|S1h9cBK)ef%KS`Ux3h=(dNB;W{Sx%$^j%Y!+K`NRGZDf; zdx(S9A`e`-u$ZfJPZqT*nZXhl$+%jFpC%PQaaY@*_mXLstqvXGnBNNx^w*ih`5$_% zuJ-pKlu90wLb@G-ZM;arKN{D$g-dLy-rqmty-TB4bR|-)|ZAzRR_eC*FJ7 z-qHiHazf(97P4K!U7EWcoYNfttKq+^o$y-F@Y7ce{jc0mZ%19$2l(9>~A5f1= z@su_4EY@rA=%mppzi?Y5CREUTuAyAZuq5(_Y4X949tD)*;N-MQ`C2kybJZRe2+ zXWjP;fagCn`DOe$XZ<7K0)59iy(`}2)8)`aO)s);k$P#sNtGDOz7@|+WfFIG&x|Qz z*k2a8$N-vJa(zS22iR+6uToR>&KwyxVwx;CtrgeFwb+*SI3zJ3BU11Kh znxsyZIniafTUt1(@Q*DAlh8wkBkd;s5n1+_FZ;ae1CSC~?r@u=4pIZDKD7xKZN~}U zO~3n6t)_09lAM8)RS_Dy=#3B}VsTe)cy52#5iHFD`)-1>^ptm9zvFR_AGKp4))w)& zjaqkRmCTGY`JV@PpxyT4(w|p?Vy8?Ipz790)5=^c|uEjApEUc~$YVw!!wvlqJ*ri8AV z_%Ddf^hYz?S26pIX3%5S3;$H1ulAY2WPm2CH#`xK%g?%$5Uwh(?>NPk?u zW~}!Pas&TvRPvMj&)t{MiI=%0jGKvEP`@%Eca(-FFOWC9%ezuyYmqq~q8&rq+&i+h z?oe|bbu%kn;kq4h(|Mbt%2VqhP9y$jdf!;wqEz(gp!=zeG=p{Z4G5$-OF#8_W!_Lw zzJ*^+R{-&~5(!0|B3wp~5kG}7sZef|UhN+4MWV6g5&%W@cO?%q(T|GOIu)mBAYW}R z0U%WJ@FDdOhKOfA+(tHRAdyv3Ah58^!ja2o$65|sqEXOK85(Jm!BkgkC(fo|-;h2|t_hHAyv*TDu2b zEwo3^jnunA(m*D*%iD>xo6nSjK9+dRJl%g+yztn!8X=6DSVS}Ws@58Bd_ve2A|nv! 
zUg00mmA0~Xd%XzWoc5jtOkoV_R8fUTs-N}gb6tgVH7da25Sa#7`xE*TO0#!==odfw zd4FEdPv>%?_>--Oy+a+X3X=k>&9D1}dYNyfPrm4FwRykG9`3HB{R4e>a9On@8c^1L z#KsB&$%wl9=9UBNR*_$eC7)V#Bdta!UwJ|BAXJfd5KA_t9hM<$F;dcNQ4{eEurDdT18g@!GpfQVMnegkRUa|5~ z!>cyk6Q7pMmtXu7nYdzV4f=^XnU_%ZZfZ+|`a<91u_u4h-)00T9ed>#hUQhv$HUE%A za^)Y!j!x68|3=IqdEbR{dL5&9R+U`|v}{u{bV7-*^)tRyWg6~C zJ`po73s;lhx8k#f%_n^Iu7>V-WI1<42mGqX@-S+C@|LWFaI=^_H$ZjE?NwGd3?5jo zJ*J8;tLP(Ai}HEWNWF9l_Bib;b{C*j;^{yWVs0?(uwA71ArBXGv+(%HsYuaFF$?$n zN`et(70rtTunFugQFRE_e#29A(-p1S*PPDk7g2v3rZ!g!nHoRmvpc}{Bg^Uoz&(tWh{mRK8`msTj# z`c6zW`%GUx^s%D2CnX}r96*qsP7Z$IeqGbqb4!FvzYwl&`+63^K94#`4W<$i^ZA-fM)I3hAKBUN?} z{Q&!YQs!Yb_eW~L(`-$TlyJvOmhXpY(I|_dXtHMf_^Io96&JAU;|q@q28twi`9;g< zjs&nh@W66V;;qB=Qd1)f8vTuS#t(f5AP1JQK(#0po}PUhc?g#4a4mGA5d42vOy`pa zpANU|VX^@7O0{c%dqKr?``-NH)PVYmJ-I9n8`~;nipz`{4&?Qq92Af~UT$KQX)*D9 z-1IooLWR*q;Z+!(ciRGU{Gpu=G-ESBANN8ior)~M?E6=zdI+oJ^OC0M!2d^eZg*bH zt$T=&$+?R_h#kTz+M0a)!MB59#qJ;7UbCVHT29V7+VNHVa=H2Fm^5W;{I@*c1xd@Nw)` zrQrfqx!XEDey31tXqHmLg5t9lR<32@2R`u=S7C1q5!#UDzewme77$5h7|p*Zhb8iF zt1vy*$wzk9qeOfr^4lwkM*N}uRqm73eQq!6?jkiFe<|n!5T>8bVFQg=Xn`X3zq}OT zmg7HkDJWC{#fiw&%hO*XPIc3Jz^Dq`oOCg$4FYug@SSF;W*h_nDVaOz6=gjWBI@i; zQjk2NqQz<=uR#boYs;idg;SLcZY|}+qA70ZSbCgfPDkrU6bCfcI^gfn8$gp zl?aX4H?N@Ydkyh!nj#jqo$_osJPx{P{OCYhhToG}Gx}ymt;*n>c304hDm3N3O*j)1 zIydOXz}*Pi`ReD(qajboWox8m%92(vY*|vjWkv@3WS|MCsA%qkqCd5IH5m-3(VEkj@X;KdmUSO2e1q`x@0!$8P6dssJo~+VtKcI<~Mt7qZ*=5+5TuZzpNC}iX(=^^#(boy3Mc6~82Y>6LL&>AW<=8NIm((fm;z;}9w}yM;Mz0I9u7+|*mUi@5Jm4OGkuB&_8db=a1_OGW z;eKfQbOydCJM0dj6PTbiYFmnFGj7b=zx&De|+6s+WxqU*aS4K?9~3seFy$i zc>Mm-YrmWgx}ebmpxl_Ep!F5vkhyFvux8SRftQlapX~^dw|p=>lG!=SwgNyl^k#w` zglf)mO}W&(ZZJC=j5&^EXC&@09v{zoZZ&7kZr8IF-pZ7`YG^laLW_k`cq?D(CgU~A zE_pFGznKX+>~ZOxn0`SRli`7!`t_!0t+oOwki&IMdt%dBr@cOPdf{lH7+i#4^Uy|u z?e{yncW+cUUW*Gns@gUkp>Eg44C%yt5U`63CUDG60f&lwUxds`{jzv$Jvwz?NWsV} zCI9-cjDSKx>8Myjs>470asS>N59(b08UNgOsRhdw8RtCq(mVqqq{)9I`nJu~H<0hi zEh2?mZr_G_Jz*=AH6Cx5Dnd3h6F7rUo!$?E&4QfF;h|P6Yx78(I1F0p6m+E~W8D8W 
za;kwmFd<=DF3q7Z9;3b`*5AktnQaOOY>+@su*`|C(l`DZLfxA9p5C7e)aL&6vJ6iC zU0PUU)`E8N;L1uv&o4&OY#t+h+ToV z%2DoEs3Z~CVTR(SrUkq2cGYqyt9QUoC&CV1ZY$}U6oG3J@kx>CtbCf{HSBb}L=t9~N!k(}B4bl<(61FJV+=e{X@@7PtpBrV7afmK-d)|tU8d)0GkAPB^tkO^ z#KAoAx5Ea@3|_^@vxOeiJuVM=&4P*@d8g2g#b_R_yEukw0iz<7QluAu zGnjR6oNd=XwNG{WF3%!RiwbS#Cx=ArP$|6y8{6iy6c--O~Zu(-HE-kh>cQGR798Gb|I}LMQ>>7ovTtFmP zrT^L|Pt#Y`>QiBH?9en_vWCY#D!I+Vh%`ENh*zSQC-kg%?1iF6r!MNcryZL))yma? zM_^N}Y*h}e=v8Qc1H%Bzn0;JmKga#Q%O}0@gxKwIix*MAwKG8gV2>l-Y9;N#AT$O6}6HhJ& zmBD;sNY3d4r0{G38(gC?qFJnlbKI5-sVA9%7v?!{MKqZR`EJyMgboMirJh`HS5(`pX-l^DgCQP0TD255T7n-nB3$j8v zcR@f73W}=E9d^PgrigRkjq+9kY&|#Xd()MJJs<0gQfucchjp2DCOPM@v*^z>F|VF! z(i^iK(O)!H6Ti$^6p(m8U@nN(OBFI78PZF|kKdrZpm!kh;a~_M>a=0=V}5-({&D={Sc$QtPjz9J9nj;PQsy+>RM&gIB~5=lpzTbs z7xqm_-cAqRYz9Z-hpXzQVlI_8XI111i3?#o?&CslU!XEYSs%d@qoRT2Df;L2n|{=# zS+HjTK;LcnY~G*aCFduyiwZ?Ua9Ue zi{$Dh<^FI>&e_kk|^|A%AUIvP8G6zy@6PABt~cc zV%mx&u7V?{7S4hx_CCNpj;M4peEa7<{p-j`Z352Yc zeEJl$NLvk^gr%Z?kr3Vls18XIrYiPyTK=*~9yKEA?Yp8wcE8EZ%)ghp-cVwY$pA|H zV-k6+Xlco_cbPLqZpS9=&bK5}i26VY)tI3J3sAo>5N&U7^%<~-jjVtRYAZ%f2}@J> z$sl3okF<)mcArCf7Ws;X}FMgD?EcmucErqr1DO!jMTNa`nyp zpI#}C51(o=$SkZMBF`GmkcV1b=bB(m&v*ZJAtQ4_^#FFcK@W3f^T1XtE(njgEg5Hnr{7=S;kz6F%Y-6()&-)h##Ew?0dY6GsK*1Jt_gfFv>TX0@?SjwW1Kk#1 zR)f?AFDOS_kM`a?nEZEjcnF#nuCP{ln5rBw*|>uew@%fT@L#Oc(?3wpF(+L70+XJ1 z__|bT!^HVDJ8GZ9m0XQKsX9p^!}&)-^2zW|-0Rg7Uyy-KO;)_XqPHH}q;HFrb^b&8P=61tgFeaAyqm5q42S zX|T&#%bhK9OVk}nG@tsngt^S^=G&AE06Vfm_wjT?CH%}lW^|&d(G&R9;}#Y+XS7#y z7BOI`a7LL6{U0)Ks$aaEPw*)zHVChJ~@x80W9zr7#nAGHjZJXOYr87iMH_uM_s()g&8A~7*W zkNfVlpM6za-)lN{quQnNnAG}4)v#{`h-W?Q=w!!<%QP3Ye6_IsE)z^i6KAOnzR=>1W3QFwGMRiM$@w1_!zM4@`~0XQp0kM{e*t z$j(+nCMHBez0yd6gnnCH&|1L!?-^XtgFL0z-MMBBqes#*g}cG_SND$ zTI_g^skN=X!v%5MBtw4ru;YjL*t!UUo7TfH%2inhrc@dDttq6QB(ArY;i0JG%E!&7 zazhi5e%a=nN6m5JMROw6asC0a;=w|Vy9-LVc$bS#BCSPurZJblaTVgbYN4x{WvCeJT0&_bBYJ z+TYXK7)0OyvbWF21&qTe!CGG`egD|mD6$q!qd)Gm2J@NHfcU5N2mi!GL@Jtq-UG7@ 
z@n(0*LN@_1_s@L1$I%(R-{kQj16_V17wEBvQItEy|<-?bwCH^9{=L=Bagg8q{6nB!pm71(ZupjDcSMKMkK}F?KO|iME_mb9_eAHFOH{}- z4yJS#V0Sv7-yZSGF8?TOEdzg9${S$3x$~jJ)Z2t25K}|KU`27{Ok?kCdy9vyO_`=D z;Jz5YOV1MTPKogFXsQqmVqD|KWV)TlRWz>1)m63>Bg0#c-PU>T*?R&pACJLJkg@MS z5nY$bsi9}j74o*}x&b5a$|9W6F{%Fmy1-(gmaG{!6dBhFmV{epu|zY#(;3!rXmY^G ztVnq^IE~a{XYy4D+8hEcu!cs>PZ~Iw+p-)JuX%*}CYsL~6WZ*wu9;wmFvL0hc_hZO z=cO@uh{97P#0&Dlq-S{kHxD~`;jkKuAQAHzB-E96f%H)kjKL64g~W(uZW?SL)mH%u zR7-^%%5I=%T^xuR>V}ce4ZcY&d`##8->{&*y%!?C))xP8y@^$HrQIcYOeYjalHNpZ zInh|1#Gc+JmCFSZCtl}kZ*qSz_kXiY-7xhl693n845Al5n)c8SR5&SsyDX%F-VJKG zQ5L#Ml395Tj2COlLKE`bem-|Zc>$vu=JIfulLCw6c%^0eqET(@+ z)<^EmY<|FG@)c#hKdraK1`@_?TQD=j`LR4n0)rt-BF#LW{^L-k(Z6eEMtOXNX+{jK zSj)9*z}jT!Ab7w+2Kv_0J= zV~8S67T-+DY1#(KM=CPHzSiOW2P!MqdBd$L*Ur&6Qiin7WqvocZ7FSsIJ?Hdr{uN_ zFDlLn4;==uLq^!&w7o*5^cyUo$HwjQa!w{^@GHD5z!m*&(j8N@`OF71R0iku@3xV+ z;W_071XUrRXhM3HrH(qhk(j_&Y(ByjBwPk+q_)Y21EVf87oNq&n@V9F#J8F7De-Ph zdDeSS8>c*(iYRy7wrN#11PahL&%?demstyS9o`|Mo{lOi3A5wQmXSI0T;Bytg6Q5s zHLqZt-L9psMSA@y;-zojWrjODR@>sF2kuJm+Q@{o`qNO7LaOS+FJFVG&#CrQ<+aP4 z9-+=h|9X;jn^Y@T7kS{;!={aGEoggXK1GSWlQ5=T2YL6>n;HpXnRWHMwRKIpgVU7L zIW740)+Xer=m=o%v?e$?J4vxe@t? z;YRW}1b0Yz1Ji#+6&Ma20h`+UHsMS*KW2xP+>$yPZmo8|l8(HjztyR|_ASm}N43*M zQ5!pqsGVBh^w9q$=Cg-2EP-Q^j9%X#T&s6=#R>w}O}JDB%jBD?M7Jv?=JuQq{k;$uACSG9_Bl! z8EW{M0X2|pCDnwo^Re1Mwx+h$emDVT*nt@^8EVA1;%I7;vi4z5bM{xpQEKVHQ43;e z6N`tq-r-FGf5Wvgh78=wFSgL8ISvwY-@?GaxFbhRnaXdOtV+w(Rqat)(Olo+?<nBJXq~2Q2E*st?X7mB48K#N) zH7fBBpxE1hNM96c1S0nBo!gB*t)l-#cJrQp22scSk>#cB;RZX(pJ{9@WAq-|dCBB& zcGc7No;+>0! 
z(VNYcfBLJuZz_)5x+w2(TJX40=$Y%k>;xA11|s4 zQbLtZ$_0wvxQckEyI|@q>WY-JB(fz2Hnd%(EpDJH()s8Bla{h30CV0)UKaF z!V;e18GH&u`vE&Vi7F(bA|&>&J8)+3S_lnxcQRZVj^ln6DKWA^_@wb;SUM)iX!p+6 z+txNkb!E~0&mWuM-%LXUxsvk6aEg}&7UD^$F3MVN#iWN~q}kqKWpw52sF;53r$z2N z-<0mEurhEhc>dWHK~{Y63U?+dNNs5K`D>%96%+H9jnS$UMS|}1q*Eype!Yg2uW^AK zxd!Z&h*LZjT*x(}%x8aa?NKp+f)oHu@d9SX6F--APq%F^WH2ohiQ!|rRfc6+#DBzr zmX@9;;|&r+a3E<)IILQs-jcXeG%kiIger1{R!fC&(NgeN&4b006QNIxO<`d3&UJOq zF>@ur4#t-;T9~`DoL&ENoG(JjZ(;D4t!~&@b?kl-o|q|Hh=Z)~!f8Z3vP{ZzX)IDa zHj!P2hKqFZ0-#9oJn4|liD;&5yNbkz3dfv=aO7=F2c4{;`LBu)vgWlfhrO2vOQs5f z&QEL$6=ySlsO+*N7|(&)KuPqTfa0_jh(!3Y-U3Yfk`o&&b`eWWV3V(5wSBMHY&KjI zb-v+S)Rj;X=7V#93{_|yDd$sB=$FL2r2{o9vw-C<1Vx9cp(7H%oZemPeY;z6j5)UY zS;6~R>MPX@NGrtlerPs|{}`+K%18R92DC&4djxZXqnsEVWJ=QU%zYHE4+`$#FEzgR zI}-gjRU^-*I|n%_fAPBJ;FZ(GM!8JEv?b|oiB9dgBvBJ4AsP?+r$LUWS{12P^{JbU z{fHq012?gUOa zdWZklcT)Z6iQrolRZbz#r?~j&Y4x3pKTSPh>XKf!hcz=<;ag=(8-1z`-?82ZC zv>{3C8ylDy2-=%tR~*W|93wyIXz;ZhXsyoqe%&;8r<>A`H$R*eDt5ox`qAwNxf_xO zQ~P&KAbcy*_1xqvgJiQ+?;PMt)iSXkFzpn)B>RqvOjx0k6i)mdB)-}tqT|ezk)8ai zfTG$xU~Ky)5_dufrimXUg5Uqalu1~&f z3k_W;l$o5Cyh@k?aWe+BO*u*vr{&hd$;CS?rrdNlv}8`PZ(tdw#d8-^#!`KbhZwQP zpnEjS{Ni>5q;{|i0gFsE0yatL=CsGvM&4Bw#g$X#g}%LC$oqRxXQ_w$PdSF3NB_I# zaPQH2S*eT(ipXZiJ@X$4t-vfe5LV$`?Og8UnTkJ-ItYcglT+UQ4wU2g6ibLK^$CoU zp}OfLR9_Z*JVX59@-9nr{dP`eBvmgStrsZGScZvg?{N^n2GTvG{d2O}d+$}QgmX*+ zv>?n-=Cb@X%#G>XtMk7Dnu(eb>X%ma;6yz`0i@{hpWcOS!j^1<5v9`!N}_MaT-q$f z+qLN@X@x|Liigj*_UDt7v28Wmf-4ySlnz=AS2PgDs9Q?oMUFZD}@i?<%s8t3~rGKT4&i; zu+3m}f3k_CraFSMpHC%RoDN0#6I_pcAU~aL+rVRi`SK8r{F*yc0jJjISjUZ9hD()e zWq^o#hv(Y-Q7*VZYsKh-5kk0!h4I?(fT=q_#~E272Wdcs%D*s2;sL|%av3Qi1jg_x zgbJg`aT4NDeMIlfySsKEZ3RXXEcM)h{ATHgm-1R9~+86ruo+rTO?8)3B zt-&Vp@@ygp-$RxHB3vr5oDw#%9{V9>Fgq`r+*2Y(LbZC1t`O88x|bNI(Z~))a|tgL z7>-0?#a=|T%NF~Sd+h7X4QTkfzxAhuYSwB@UHqq|$B6AFK#-l{JV-kA^S}T}HS8jDpP zk2dSI{^!)s3%CBaMb(Om%)cLrE7XhkA*wx4iYK*FBtd!m2^|)VhP+oF;2RP>_ZVg# z9CkCW>9Fc8MTb*Dw+K8Kkz{`kSf~x^)Ppy<92^j;q|O|L5TPqT+!19b}&! 
zxfnG$obdajA4k>SPqmsz)Y!eS&nxw*(Oqv6W2UrfK8HqOrnS6y&oeQ25a}e-n8fd) zRLsX@+n>~gev78#G&ih*O=(k#3Yl`17GFzcFEnl9-Q=XUP1PLvR!E> z+I%eUae(&4Sr0tlwYWMR0i$U@#T=H81dm!WWO9Y@`<)$KwjTO9yMh9rSb`)2ub?uv4HHHQDIIJ zX|q2?XWM60j&H>rgl`U-qi|jqy~^(DhR)MwGMyycA;7Yo%0KCm-nuX8<8$q-nYg#X z1`j)|FNJ~d=FN$)k^Ju+W$%`WTPI4)B*fHLL16=76-9_)K-?M6Yhy)6vE;@FhdPX+ z7<6g8Bo0&fW8%;1CcZA>_V>L(9svq0Y4{el@)%8xd{I?Gp3H*m2nt@X_E^>zT$P8W z)Lud|*?w=r@vF4;cjV;S6BC#p?h)(Pb{}bv+NHYiE;-O^`|wdXRvJ32*}%b(?@=qz z0G|152tCRKLrxv*ee43eo|LO}Yg6Kn(by+=pFaHiO?aw})|CMPq91?p=jNgg=ytx* ze=Sg&^O0)6IvT;74<0*p63pNK1v1gf zxj%Ts#QPJLQUC9nGCU^qa#l7zIO=OYmimBYjYi26@E0A76N>g0=WX4z5;eL&$?3SzldQ1e9FdXi&Q@$HVIL(r z0{20I+s)-hn{5b}#g^LdT%P{#-la3mzZZBrE@=qQf2RRW61fmF#Hx|ypZy^vbL-tT zEkVJsdj{Cz4z*9J5SNvS@sPF_%RajJ+IbV{&j^_D%SyLvwJZCdIZ#dL3&TI~_zmnsF!56=E1Y$|`s|n^Q8~wSl)S%yf0B>qnxBM4K4DsMQ6Fv5F60 zRW$nW=%wVE?0NXH_)Wk_R3FdPPJQqErK(Ho1)2a;l>^&ktHjK$*I6I9HFSglrEzTcp@lvbfc!9GAcurm2g80I|Ek`54?uM zb7_N^XoI|&CwA`3NEicHUODVP?&kGt=fq2e7J*THibh}Xu`RfGg1{(iQQDlKH;7k% zbQbNt2DnfGw$Cm=S;A`0Qj8u<)5dBg(s?wc0P=kH{L;LsB4wo_FmAuVLfR6kLV^qI zFllz3rxQV)Zv&G7;Lx>nt0>yZ!!nv)(I`Gq*t_Yht7~_RbRW$Y)`Y9ZTQm*kGb9|1B8fOL@DbBzkzV*9PKiS0lsLh||F=#n{{Moc#=Z9KE_cl{ zbN^?*ihfU`a9|^R9g)#VCaL1>1=K{gxYi+)r3f$)N@BfeOp6wKf*rkSS9#Ql(1BDC)?W1!#P}u+n^WTcUr-Bt>4yTu(U=WZexpq zj^U2c^bGm=o|-3)4d-t6U3`C~-GV();bKm}eHoI9G3j3l4ow|yHd4JC&E7I~Egk9i zJnc?5Pp32hHf!&rXouA(!IJM=@umfVv&LOm^6tg|$1y`V*TJ#vikZ9t!gKz4@vcQQ zLF)&CXcPa#*HVN^bW!QR_nExK-`cbkEm3nnEQGWx^nKp}@Exm)b?n6Uz47x-fy|xM zJDv#oyq^3a?WvY5_M8U$WJI9-o(sNn3dv+5%sN^~6LiauxMD(7be8(Uuxd7R`>$)E5YX$#6xXCq#DAAmfmT2F?#Zvvif3@oz)^*1pnr7O z)Xcj_=YP%KFbQqmdW=PX-w`Bvel6{b#rRD24gL22t_PSa;F?6B_f4U7rnD-TixY9T zUQ!RFVn&e7m$zO2*;=*S3;a_Iz4#Z{jO(k`oBVb}TO{>pIBCnLiAgoGy2i3V|GM6Q zTa_}jj^ZLfdOVP<&wNjSh^cof)2>bou^A@& zJN&?%)hhizW{^Qk>aK_t>z<<@gKA)HV308Z8)>b`_KQhiSN}sfWmbIXB6Ut-DabIh zvyFBu3@kT3g!w=M#t^58aB;q&W9txXoj&Dq1AL+*G{HA4a(Fm0^Wi4lo4WoJjc$+6 z`Vh_)k3I@sWhNkMT$xN9W|P+YZVtZ^U_6io1V9IhguSfKtluac#3l`Lt((Nj1@j>= 
z+h>5d3;J^3KC9$7eH(U9MWj97ub>d+Ai+!#HD5Q#&HC>%cj!E@A)#M|Y}jWV+jkyw z!;9|%O}HC}J5%6g`c+@gUPpK%RaWPpbjLh!+l#50q;~4|XW@--Kl(HOk!xQgSml^H z-=)KyPMB4VmH!fd*j7v5sWqq_k_rP4CIFY04T<99(MU9FMrL^z%RhL#eB$jdA~~S#d+iVX0Sy=ACZ^x&8=|2 z8+DG6tx7XmosZDBJ)D}*SFI0Fb?147@1UGIVwkP%or@aII2oMfeZ6Xk&a;4v0#2Fj z+GA(NyvAK0I)g2jLQk5+?!m z(MXE+q@fq4CZgmsPmVm2Ub~!D?Q5p(qgGV5;&kn+{ZkNqW#M&KaboQ%{Mg^{`ipq{IOEEhP3+ zOykUgzRpfMhR562FPFs&q>Pz^B}c07;G*9#^v4?o41xqrQrfWV;C83FZ*zD@Z1pRD zKXQIz7aRRyka)0ilM@^N>T z!D1|vMgUThO$pm?uk_%-3L_ZA9|^Z|Uy(ES7{|JCnF?iGC4~|%7H$5Ndq$3fK@$b5 zG>KF4oRjJj4s4fGGQgpWrB=iYiI>O40Ei^g6|>Fm6ccPsncWDQTTyRw$p#i+Cx99$ zKMLgz2~EiVH=LiLdn^3&Y-?q|@teG@ zU*hB@1Zfwfq!+WnUkVMUI5+#Log~N?pnuqxr8~KQeAznuLi&~kw_+-H_I!MBXi8U8 z?RfXR9HqZcLy1tml@}}}uOfrKnn!@}(0`>QrzdafhQx8vF>TI(1S?W_uzOeiMvm~AjN$;A$sQO?c%FGdHG!3a0d8R zyv(@L@ZrC&8j|ObV zvj|$-oov~sLPa4uec*n#1xFkAP5&Hidw!NRm~!V=8!IZE$Py|@6|-( z<|QhithbOmYivOQ~!X+6XK3z@Lyh`XUTdK$vG|>~Il@13o2|8XA`v=Nb zZ_hT%5d{pT?$b<7%V(AtQy$_E%p2LZ83l^H^_7W< zrjlEe-pv#$bSph7SU>pkA$GDC59F@ii~;*lbKF#>zG14$dE>zQfvd>TMX$Gur5@WZ zm{k&6(~D>{BwZtu>NT%)C02dz|94G1aMe3GIxhfRv>Pm`Ad|!ADR^eH*$ClXEWZy4 zDB47)11b}W>^jUh*98U-RgqEg2~os>LyfVsxt=WmYf2eV#1sYj!hMrfHoGA|$-<3S zfO-bcm6Q72#vc_f&Z*-5U30Le^-CfgbY$vTa+;9Eg}AROZRQ3ZTYei{xa9Mwtp`7{ zXGlc8qZ9*!8qa^%WWovhq*K|38?2k-vGkpv7Mb+dHs`NqPXBc`jsgaPTFctGE7BHl zbtM9KXr*D!@RxUZ8)bH?39mkI=TL?ZQ7Ev|2z(!ceXir~BjgHXDjUYhB7Vzy(yn|} zU!5KK9=QQOhJNJR)!?#965@A0Uj9fF5hc5MVjqOgf0EoqINlAnAW z4SZu;_JOA0F@lJSCgz5q%*%g#@VP-J9-&0b&%dwXT0qGJ&M5g-WmL)If_(EU4x#2s z^J%ots*Ev)`Czvon1npqitt38`BsbAdE0cfz?f${6pX1gx_nCSZ*IvvwAs70aMx-Q zN}D3AF+TiG(5%1y8?yWld(#fX5hJ6A9o_|`2xLfr!B=K1?2o?Y17G-Nho?TolSdq7 zOj+f}a$}Z21I(8&k{fCMfRUAUoUaB>*rPJA3)t=Vlr72ytu6nvk!t}KF3N5-Vqc7Ve^gyB^LeJ2e%;e_g6bR8Th8gn@fwVpKIUCkzdvs zz9J`IW>nt;lM(lkvAOzhadq`|61Y$Pw9mdZlYHUlS#lvnvfpXZszTwgtRl;;6t;{! 
z7e40m$IVMgON5xFMuuXO-^qExm0+M@+G{dl@XMK?`|5&0uMH2Mb|&Ca{z64$X5hF= z4Hd|@Tj=1Yw?QB%GvDyC02L%ZfV?H@$2Jwrdf$6NNjymUc?K<v&*u%K?TcY3mP}7}py*$YKrB5oq%G*P0;G*u#-|MMo zSh!|gF?FwS9{E08{uRVA=nakpsB{<#pGqW*?erjs+semag$}T`(H*bU6%RKIN~x{! z`uP#zOy%AexoO4o3F`)m9<`en@6kw3Z}+ZtsH@ODx!b5d`a|{1;qVu@gP0XgJ)5QS z#*)dCxxkTUj9eSrd4+G+o_n)ZVzuo3!!xO2meP*1{_T0;oDVJvuBnxG?~|JPx_iot zdoE;~x12d)z9myrt6t3}1s)IX^bz_bB#G=7`T(sZxxlt~`;2jx)K^x5>Gwy3L?>S7 z8XU2;isI!n`g`{u?6GnRTN9=C>$Ii~Qlz#_ATIL>cWNhb34__Z`5}<7 zGWfI4je^(`-TTK6*KswYsytnx?kXn}v?ZG+ody~`n+gMmyiZ||%Mva9>FV`dgzl2M1ZXA{`^h#JEf?y+XosF@4t`rktRK*Eje*)=X~v8P-#}dW@uTW8S`?s zQ!B4BS<2P$63oL8J(-+TSS*%W9Whzd;s<1AxgK%Y{ zIYrq**g%b0&i4vni{|?88Vn7zOQa5)nV8hIuWQ)3!i#iSveYs@JRA9nBa zZu1d85f6AoWz{VI_01hqqNH&K5;Bi5WsG+h$|pQv1>0?GpMy3)oZ^%zTnx8>f z`S(Ar5vf`S=P-eOQSa%6i-|4%1lqJ}>6Hku6-ky8G8I`LKNI=I`T{%k#QyMT-H~(| z#yCYNCK}SGaaK?7$tzhs(pBwM^YrLcv-5R1*H%z(*4*U|V9sSm(rwC~rMG2^9cD+* z3$!kQdz8ngpPFqMt2q%BhFKJgNa7gKZhKTH_5^{a zTQJ={!A3)VY3+ZP@CqZfo~XJVy_lgWb>I zYEHW(wb=kX1rhk5sTOf(?%{cC-1{#`T^}aSR2ld+c;OFHP2Sg4s#JyH;zuGf5+P;u z_RmZm)zTv%73EDYa(z}WcvrtHway|F1r}m{)^u26rUjY9QtrvbwN4bY-v3wu|0maZ zo!~r;y`+kxu2wwu9@^vL*DUCqu)0Ub(_rk026Fw#I2s6ZK#7ZIuPRzuYRQzg!5?0p z71o!s#C4^zJGho;XzZ55-^B<00?@IqxCsxWTyh<3%Rq+H``fW$qwbUw&1YfV;A1{_ z4^1I0X(W~=LY!X=kN27W0w^dT3p(0>uIrEGmyp@VKBQbramW!HSjT66oD7$2O)M*f zBzJfd9I*r8hg@26XDLP|kl7*LV2mFU;gdoETbV4-7Gn@km-zI&X~!ZaQ3AKVZOs=CY6?JC7^q0 zbTacT@2%;64z;`TbZ(KZ2Lx}!mEV^rk%1qiyL~{30k2W1)8~et|45+vFHZ-=?!WUg zH&j8$%-4;Va|t0TlCIx1yUhzchr>OHGDCq?bWL`Fr3wDriHa1qC2Jvc_$DZ)Yg<67 zH$rTfCgk-?2Bx7FzwA5K0lh}-eec+1N>}_D|!X^tao46)Yn&oUpW?Vk52aF zLTLN=WRc0>tR(fV>z!;_Z^xwEZltmzmua4B`-XLCpPsG7D$Qb+Y^U%FD4w%($PN8- z{PbFZf+Pr=fJCR^`p9gBml%!M3Nr4XjWX{p+vV0De!G+&cOSt_6do1lU1pG+hbD*3ap0?~Ho< zr)=vf8>`4nLw=xWdHK~ce$kw7>hSE>iq68)%`vP6r6#@%5PEHnvF;%T?2?l5&)mcu z{_|##8xwa*ZCz%3zUEqy-dYH4Nuix$`@I}?dsvuv%6f!^`QMLva}W8yYkV?dV8&X| z7OhjN2$>Bs=_TI@+id@sDgfQDiv`A1R=LZt+<4F74)!q%>%(`(s@vFZ=>aYbjB{PN zFw$b6;@XMAp-dg#1*Y%AcD6?zBgk-soF*Dp5TE@nRjpBZz-TQo6Tkht<_T8Wh@WxC 
zyjldnps=iF*&IYoT+g2x3&*;1=n9#WxYG9^(7TTsRW_OjGG|e(*A`}Sz2aa${uF|W zDq%V!oHcLypSlKfShh#CVSLhxwhMJrvcxI-wf!mbT$?QMeNePfsn()?D=@JHT$Nnu z`~+gc*3MIFULB3m2N20y*zyIVbC;HvQdFjQV%3x;U})A?-6P5G zy<;0@Deeq^+K+2(P5{pWYrVziEi&DH=!A27?#VarUHSO{-I~z#QLA2~8~(#4rC)U@ z@9&(y?04dJ*}L6eKNgws46c@uAn5luQ>bTh#ymIf+U#EUocNH+;$3Ms@)5=coV`%i zK`0)*{Z`nZ+>ptKAN_2X#Phx0Qm1hO&U22bTqbIKskT5}v{$}j=fyTTk-n3#Uw9Sj zPH1A>)akTzh_gDWf2Mntb{Vr#e$J);NCi1zCx`w(`k$|p zHpf+j-~a4_-yi>D8bpROi!H^gOT&?fz%0JI>4s~qJBEhD7b@pGteT=6L*eI^4)JY_ z_foDGx@|0%*Wt&-@enJf#M@B%dk!6>dQLTs8&dEiC6BtY_3hvUDOX_88%?=Huh`Z* z9V4c(8S~JTmH@jb2g1)9oK%q+M}r_fMW~lNrXAb)=c!0D7Wk#*pP4$nzG*(H{w1LTpsFo~`zY}t zd66NZ#y*0){CFVo@3b(Z&s}q7mRIjb!#yp-8Lt)cunT$}sFR`TFXkPuqe-@-RyGiv zIF_lXB|m=2$btuMgt-D^S5+%!r~vw0|0(&)rxZYsM({bqGN%OnC+}0E10uPaaY{|o zVJ?UQ?rDP6UpP)Np0Hg*Wq($}o_?;Res;{oVb^ueU+}%e{tMIzi4k$1=DG}u4W|XV z5Tjk*Z?I^lz1OWu=2GI%`o$1Jx1VH#ZCd-cudsyLs+U;_#TB7O@f8*RGt#x7@AZ+m zY@VT}sH~81y8`1K0}`hx>vX*Eq1M9qKP9>Vw|$zW_mSAqvaK_=bhMllzePkp@CkX|g48AkaQzXf`Nd6Xm{M9DHlE!j)e)8v?s|xZ{ zt;YzB<}=F=rkpa+yLtWg>))_#{N_JFIq86cv%mv+?s@hrFT+mvt^L#u?9id9ir5@J zT-|Y5Y+5=I@Oe0@Y8Z75s8u62IUY!K{2l^2mYnn#RdxTac^PEbX0zLFMg4J{rs*HO zM_-8?oV0C|fA<2_g-R%Nr)yM}!NuhVTFZn*MHRd*AeVC>jBSYO&N%HB36rj>gR-O; zHsUn3xxNE_7q^lB_eX(nPolYen+FZXhkg6}=mE3%IFc3`I>YTtc1v(ZCL>|rT5<29 zIlJ_me?d2g;s6K z?8F|%*Y{}dF?O>jEkZBoE1?*HR0hV(XKgkvpIT6_TyN8wq7ZTwJw*lA+gdN0W$n)mzSBTjY$Xp7 z#1CuYqYnJMafKCIsElWoOk#Yq8A~`k0r6Y}JU9$G~2L+hG&%7--c;{XVegy8XGGFN5d9 z2l8VTkxM+pn~5zFv)`&7dZz#NY_Fvo_LBc|TmiVdFG`J%^9>KOWu?E#S`tRPZCn!; zYv}b|k`OC?iUAwr`-||>dAFzun%}d_bBp~dXUybl#4o}`(;Lg=c2<=;CAbp55Llu# z(>m9=K5k9^{V>aXY@fyaQcUo@f-zOn6pT06y%BJ?YTn#3(OfGx*tM-~gZLVshfEppqUD)1$6AGe3CU7e2; z6Z2p7vuzYODd_%Jq>E2T1An{jI>>IIvJM8QG z?P+L$C;_u-75;fLhE}i6H{(Oi!qGc2qn++YT!Pnsb%bc7)2d+$ye$s_bN4?fdTVGI zbNfQ|DhKiY>ZfSY7?iXst(OXHS%zzm6Fdp}tcLdSb>a zg$_-(hgPID;`l5TapBD(hN-VZa9UAZ&O7&wlg7P&hQ?(X{5SR-8c1j z;{%mmXS`g~(D3v#D?;*L`v1q&*~c@zzwy6Lot#Rk`D7X{`+u)k|anKshfc`#q3Rmmeu=btd=$#N-gBxu$qu|RSxpyd9c+nOF< 
zvjdR!fc+U+!MD3g{!7ksBSqK;wfdquXS})_e%6z}L$@^+y{WtQTkC5U08`;SgwT|`(s*UY7nG(sij5Ma-_fz~}6pNQFYS9Rh+enJs` z%`IfYC$+L`=3&^GoEud`u+nLw-eysyqb;{NUkPsXffHb(`K(jd7hEhgu@Lu_ED2@v z=8xsy*5AL*Asg;UVri3fWDu$U;@#Tx(WegxmRn2t67Fh_H%?Wc!2BI^H0HPf#f)}f z{mH=YWjEsRyMYvu3%wS_n|;O8pw;Vq+YZ?1kjGm2oR?VZA+THAZ1bq=?r5K1I)irV ztX`M*nTbClYPEO2$T)JKrmFF^+k>UeAxQUV2lB`Fiq|Jy$Gt$Lv|ekSsP-6nQ%J7} znbOeucN;4OcAiT7F;{o`?WqH^bMbYE9*KCfxI2tZ2WsnfDe|htWuMi}J}VhX)+khS zOgGygu>twTIK zv#{;Um}%xvXj`;sn)_J5AU%iu!cm5QaP}aF9X=ZgRR*etXsZiFW3M7OzCRG7^B5&= zUDf=p@`-@BqzttOTn&pjQB`_c1oco}t|o;8+qSU>{XO3#E^nOUey*x*GImRXL+gxR z46j(k`~l-`%%aBMfvH)kz-g5tIe2FH2*|31DIJf435%j$YDD=}q){b2%5f6DbYDpr zQ~rXt$pla&JD}B?3Ntia0*|=rKGTSgD65KrdkV}@DR~BA z%V$P%cI@jc7aPoxg8$yO5Zq@T&N7j_Ya?AC9ok5G=KH$J@>85Q4^e7KJY3=Ic9tY* z$NFDg>FXu1Dy zk*qU&;G!q@tL2jso)&>eNgcCIA3v%yeJ?mAxEs2~gfeR_oCaYBWP7FrP95fjI7__2 zb8Fjag_@DaUBRDqe62*`N^_}xS5t<8U#U5XRe1U#CI`}?F1z1*@=m2Z&G;+I?YQpt zyjk>j``g)`6(>hZ>Xe?PwF8=G!suwp1*F}fF)8+AW!{;dvoTsKDj7q!*iP-h^*8?D z+;MXl%!+4cQjpm@2AZAYRcC*dQ}mSXh5=a!AlxQ&Y872wa>y*xJ5H)A3N)e9uJccObXorA3S0(r3i;(VnkD8O)ixjzP{T$2_h#FJNfx-@O#UkWq!c+)dp zk6@>n?5{A^AeYmBJo8u{xMRK>$;Xhc0dugNeG9U(ro4Z}1$J)y448LH#9|il`iRpH zr|%zs;OlNqB8$r$Kk!;b#H_~EBA#;zBUyp&@SbzLI<;na%#|8Ivop0kHT}l^$FbL` zQ1Z2{&k+0OytbCTf2{fNzUgdff2*5k$-QGg7DTWInjB{K?OutY`kWF3r&fh&qQ9&r zDa?&EhBtq=2Gg#&P_gDM@PI8;>bBE^D4K<7A1j zf31OgWb+8JSNB$v_YD1jt8Rkjk>Vt!rt+cnvDgHPw)n`yojEUmn%;rdzRe%9;!|+(Mf!t5Hs7>}Cv3n@*_?{o`)YrEHeH#rHv&mUFw;w!U5Z<+H<= zZgO(RfexFW_3tSchdp%49BRC?*6XVDtY+y0z=v4WBsu%|s$HdHKi~Q|*n7gq`MY6V ze%5F#5?%_{iV^%0fZU@m))R&5t|0FyU5e!B4wEV-$6YXkV5ReTR?x%*ngg#@<3TyJ zLKJ6QwG->D9_7kbenp@3ynGtW!ee-|3gi_TRCd(T@Do0sq-tqeEI>NT24A$xj+Hl1 zYxFR0%n1(Ek9^6)V8nn^Jk~#&qKbB-7)*7)t2y$IsdcW=IER*k8l(y9HovZIK^ZyY zk2fZ2uoX5Z@@~I~DSMi}0Wa+mM7!(Vc^$Zv-gw(pY}#J(zA)^fakq8dv(H1H=QBFN zd;CAr%{Gvr>!@UDI1857WilPty*+Q=cU%A#KCS>{N%58Xkk&eOj;7V7N6J9J+{-iO zp%lziULi?444Z78f@cJs;dC~B?~xP1N3qqUA7*Si*fu5R@mdNbHwSAbZ>Skl4)wAS zlXo7Tx~v%joo3>%C2fB0#GbSHyo6CU4j$+vZrx{ioR~2(x_SkXUe}tq)o(FIUUK%( 
z`gR&Ph)%-X+COCEmijhP?TFa3o2NLpM%qq5zB@>CsgEptVJu5V!=&rYjLUQBKGJ#g z4$*cB6jI=Of2{e;{a60xHQya?w9ARywglB`iwe4O5USNW55Q&~)ubXk<{B)c%NJP>p9e86Ie~`GqIY zWB7JW!n?vZ{M0@4n_mO_Gg)>>j3 zHX$C7mZ;`n-E5umb`^NFTbM4ms%dK+kW+tTrc5z@Va7;xvtl$0d;7|HsncHFfYU|A z4ad_f3@og?wK!@CZG%NXc-R;8iVta{;Kst@ADMtSYRJK5uc2zaY;QiI8e1^sp>-)0 zeN=vPVV5~`o6|RyI>+NHOQ5v8FV;c@qEeurw{2}QniqpK_t`k88$R8%vh6Yl-{VW` z&1QbbBsMxJmc;d6%0%saKk26$3s;i5puLSUP5i}gue1OaR17ch8%yp@u-b*&-35lFJg*%MhWKz znnaCtU5tAC$uBoWajv#XDP2yT$9QhUy*#2gtMd*pt6G#e>T!a8bPh`3g$ot`Zu>w9 zK4AuA6q{P|qCNIH9gl$0ENu+7&h_{H!e?1kT7T z!>>X@au1s&U^Em6MLdW{RJ56+-mm<@cYBJMr&fFQj*D(bAxPiGB!yu)zN!i97q zGc2)8y`I~@QDasoKsiYnU6Cs@qr~{)?c-}=sFB1Y4L({Ax=yt*SUPw@0|bYu8J67Q zD%BDgdrQr~R0SPm++r>}Tc(j6zIsTbw3?M`W=ik1+f>r%_O0$GXUa1<{+PBa%#^7S zNGbNl!@leBUwi~VyQH=ccXcf;>(QrHi>qpg(R#O=@Si=ntrYV z;phT7=e>y=m7V`?t1%^r`w0MlX05%JQGqNHc>i_IG@QlpL*eWv*JFjm5{aq!>J;bP zkLO`~Ck>q-T;4Vy0N}poWkp=igF+IlkkeX~isT?Exa3Og+;`?|CvWJ=kytpQ3EMYE z`z6c&b;TPEi_U(>6oWreSCECrEK066jV=$^%r!vN?!EQU#!JQEXa84E-1?&q6tlpZ z=_GZQZXasZ_7J2foRqCFf&%LyZW|oYod)NX|JFVtt>Q^A98`n$y-_ZCt_G{?@Vx^3 z1ujLnsr3(E#AqR18~QZ^BzCNtgXKLj7N@iZol=o?yvVnaFS_2EvZp_@FmckjhO*$H z%+SYMwnsns89~Q>cT|x9dO7VcCMdC2)8dl+K z`z%FPcyJMGw7R6zO*WVpbTfB+lIiX9plcXM7@jzXN;RD=^A((`({xCUEN^SslwXK#Fz zb!~{dG3*S(P*q?N@9n5+=_De9jf;QWh|&QmsRlNQDgG~A9bT#x&s||&C2?0uKlwGL z7ffU&_c!F}?esL*{pG0ETamX$T1x(xr*E}WVy{SAq(v`MBQD$SXOT9#B^ELhDCIBn z=zFAyp7QG;wDDS_iN~*=M-(le8Y|iu-{kOb z3Nj1nf~&L{PR}f-*3@Z!WCHvBMmukdw<2dEKqOC?Ivh!xsY*Jh);J;$AF7D*5W2m_ zpF4IeLPO7WjC-T0`nuF?Glo%5?KN^UeO|nD8V$0TPAStjl%9!*;^Qg=@eS-_k(yV; z-JMf6>tP$K)HGzL zDM{s_vHxzP50^b~?VYnBgVTtoEs_2Gu0H!alPkgkv=^co1{?wu#9Y#y!c6I3|89Fk zllShi#t?p7z41Eo*L9;=DV3?VgATsJG&(NoJ9shw+aOWqR@stI{C8N*oXFRnpklYG zqObSH(3uKq@CcMqx!z$%6}OE(6g4GxmlbJ`$Sw2t&U@(^Cv?@_gU|JcPfjh5+gg{g zvXj1-3*8|Bt^)ezo>n*v$hhmw1Q=E@7JQZK+ zMzajClHm@Yxz-j`<*gZom(>-A5g4nlfZ?w-} zFj@ldzfDk}uvG20EW?SfFv!}1jgWjDaXkUsygW|nHPrJ*n^`LxwGWjpkzy`(#a4-Y zH`urPJLgF~`?0Wg38p5*?cZ(Vr&xKI+{R7d3i}O^6|EnTC>&XoXzTF1l_~Kzka8wF 
zB8!7|-wp{si|H}8I!glzJ3g=wr#FhF%(&}>jP^i8;KRBNp00a^muH%QX;+zTe^wR= zl&yjjcM3Fp?SwVd-n@O+44^%EN?0Oi-3$SK*icq;uC~m;7q-{}qhMvNuss9=Vj4xb znkQhpUaG#E#u>$a0nJj#aMH(~RKJw6{I+mN;IILBSW*0U3(;F_nO18JnMNa3Mf|=XUbZ@^AoC0yh?X7Cz}(rDL!ZP`3peNBKfA*9aMFU7iw)eko3HR_FakDA{7K%A9) zUm#~89%plHZVnu)`=MVgwup`_F~Cs5g7bnDsBDopHHFOg7vuweE;-5?SE$Iha=l(& zD~1&G3Pq;Zk#d%=rkP6AZe{syuR=BDI`ZXU{4{k@nX##F#K4`;nFZRi~vW z_+V8^&XW zGhFra;08tRaavBq4h^ux2ssF{H9_Y(5Ob2sn^bhIYH7|JU!wZ)S?14LWM(^~Aw-SG zVBCQty}`sBC#UI4$#XFD{0Rm5BYx3-rKOZ%vTSDr<;M#UAij04*&*U>CBRt0ZWQY#yjN$cF-o{A-*4}2PEQ@D}CzkkTKORitGU4YzSMbwwMX2-ok~#Ms3}s zDLg9?E)E=(;7lVvF`2qj)xTTLd2I!S6A1Q^dNI2j4+M*T7q(p&~@@OdMDZdX}A z7Gp!QvAB5J`TK^7#;GHUNq!Hw&Sa?Ix$1B#&vrMh#H7jxrCI$QDRE*io>S4^J$B}GA;)(e+r}jx~r@ATy zvi5TS&JM9pi_p4et@UjH-+8x&JU%cNh8NJruxoa%Z<*3o%kBIN!77g==cNUGC@%`) zGdMlf1=xe!`o@W$8J26RnNK?Vr%##iU9?y2?Bj;>DJ3E6b-yyR3uU_-r;6 zG)0uFGbRf@(Fwifd~N9JqJo;%1SU)-?@1avDK;nCP9pAsJ-VQJ(UP3_&meDCfn)d- zohCU1pX~M333_HNjSiK|*-jCfY-HkoYv~X2Y95#td;XUt0@?VXKEkCXnFQ?a$_?1L z5&6$!YnY*oWa+t))xRd4RdG4c*dpb9*_i^BD<>1mH;Mg~CHwVG5Nu3J?M>F+WCnez z3o0QLOy&+t!)i7wL|RTyJA7WtKwXW!cwni#*mY9RbbPhagAk8?!qA}v85B%C-}kzu zC7J~aWa17y-0BC9?Nk4z2Um#6$8D@2sS+P#A@Z!PacZQK_Xmxfl!>If-aponPwUpC zA8@-jEZ^EIjB-b$Hfo!^*>K|Ey3mL8SFp29d@%xWNUn!RyVv;!@M*EQ(zrt>_vU3v zztI709<>&pe~)j9LAJ$)zl;ru3jVrDDU4pn=#mbURCb=)6}>5GSf&UVl}i$9w&uPz}sRY4S?UoKmHpVh?BFkB2g^z+vhoS)QH6KWXdt8_gUf8o&uf= zvQ?Cqr=Sp*f3p6og*Y16s@a6ts8G<9nxlE_8k3r2FE%|dHgZ}V&Y5a)Y`^u?m8@C3 zYs`p4hV|%*Hg`3r(7S8a3jn?-fb<4dv?UVLA`2=f&%ciEp55QPAp_irTV|zu4iy3n zNMf4s9Ir8oeYe(C{T2a!(1tD}5e3I<3G?8ANxcIb1@i+ER#u1V_P5h25ZC)`aRfP0 z%uUWxv>tA^U0A?;x1s&phD8OoK=D;K(+uk9q0(F%ndBL1&`tTcD)@h z?vEtHreiOG>y(2)y0iOg2i|VeUh9kUE|QpOayw(5aXmO<{a!qSDLpI}y%GXj*|RSV zaoR^-h^kEjh@3)v@Rr(fKi)U!N5>khS6mQ0qm=-Pp#x!8?hGEQN=OHriJDfpKyYVR z_-kno-l3!$VlFdrJ$MIEn8}!cjMxC!OMMwh8e|`zBar2X(fg!8W5jtVaK0?Y3Sf^Q zqcKM5mr@k>?WuO?_gLKW^OrR9tuLPIbS!cqRunJkVI6;s#jGyBY0L_GXU!s>7bI@F z;+1owZK(yVO?^%88DqnYDH;S>zT6cO+^0~8RXCW(ITKmQ8$+cK>NX*)f~J5QS|si& 
zZgn)l$91{P&Ii*Imvr6K+&z)Kv!Ed^8PkH@BiTxT?oldmIMd_s&fUHF z*k(X2)gAwywrbPw(|kqW((iidzuTVKED)byeiua8rCRM;cmr8R+;bn6J`AQ<0_H(z z*PqeJ(r39+Qtp9`WbmmVUV*$IB>^vp2`qyTJ+7PpZ#2F-ho?)|B;i?ze3-mUpN+p$ zRXqg5?rT^j!|ii08mM8?5oX`^j#=Wqw?3QP8>OyOoio%RcTH+IFf^@Dg(BxXo&2OW zCd-8=_LUWAvEB==m|R{pc3trrG{E+w0=G3&H-*GlPT7AQ0t1ciZv3*+66X(GwNl(?WhIb}(xiEM?W zBI&yei83m657)5?{imojt!AKc{7>l~G`3@H%;NSpA5E)xDri2f(42ynL7jaj%}=@+ zOhRBhxC}N~!_BzQ=6P=!R_2uJ*6_68>^u;2%*3rp^irjTtrqAYeyG5Pfq$OWIaFWQ zze%ZD$igC)-LYvO{5<8iSUXcBdt~-KXEn$sy=!lG+1aW{2{_;`a#T6l#!dt#PhdqB z#v&d0SI#Y^A=f2zh4}mz9~ZCR)HB>7Bs#UnG=;U&t#I+MX#%V^jNXpltMF)Q*WgTi zmzL?kSFV8s!jOdBr}Lf zkVC-ficst^VLi8m7lA_4KSBkvKGzKZUQ&C#AJsh$Z;rxwmf*l ze`AzMd=(M)^3*`-y~aFMm{wF21QD38ad1J8kKn7V5YHnlgY!nOi^3ZwoYO!7R2757S>gmVSim^0u_8#MP}Qk{w>t*xd`( zO4K_Y>gRI39vt_=*U-JylldWVU0q9!gM*$<85;A9=)#V zz<-t5Ue3ZkjXCn(kJ`ZIOTA^f+d{3*K3OJXU$LYILM?^OT3{h; z;mZT0%)h1{$7NL!dMj|P@={&5&a4hMGqQ$joxNvd8M?bEBA%# zz9Y8v;4#Ko^je~daRffy>X9173Wk%t(zufN)$&B&yC((96>lc)IX!Lb)N3M^9=i() z;i%N`CfCtTu}r(eyS!#iRnnSS*Y%fk8Q!&SI77L&tage%A03n!>TYws1%I9vxpN)H zYL$E7ZMJ3hb-~6Y-S^(=!IyUp-Se%;1YHtTGuE6|E1-5k_Dcf7v%-3pPG?{ErA4&k zQAiX|6iqvbWrB)ICO1GdZaH^9Ea1ZW_8{XAEI*jrBO5C-%`;K>4XR<+{F@|Y<)q37 z{rAG=RGzon>=`~%H{8&Wni$V~8ti)?BrPGP+B$!9M8B@o%%v`GXgl7EfKH_S9Ix1(g%Yv!4N}~}h z7f8v*FKkI?22)xtMKSYQF7s5a%$86=mahA&tkv%)@OAM(MA|PAJ1vapp96tdUh>B2 zdroepMqvEM2~>mwPx1*W(QE5}U;6Z)v?1UgpjTBz)FIN#B0DlbZmTdKLX{|NNQ!rL zo#l7bX3_xj_WFMsEp0H$`klYd4vpMRHBc~be%DiV(nXVkxLJuABU4K z+l?(QPOA4uxrvvof`U{|)rxU;Y>3>VGelBekxf*byO zOcIt$Sv)Cy4Y#~^#f)6H5ETlc(3CdN^`FvD_Br{P2yd(&xzg-C28M+ zNlOBPfu^j;@DtN8Xe;U1Kuf9w?3P1A@CWarCA*G!ahQ5D4s8f9f7KK9DNokc4J$WJ z4L3-hbjqG5M2GRdQl|F@CqZKPN<75U7p@XUL*lT8Oi39jeWDX0MH`G{*FnuQxFV|<}0Y{rW<6n@_KL6H3Z;!0(b%w6Oi#Fq- z$)O7LgIj5w72XvhL%!}zhmYB_-!E<{l|xGmvBGDa*p)K9Jy^f+ha;0VL6HH!=Y5+W zbQ{;0&OZNjhX@651Z&}sk*N9D5}aX@VxEGK^NMVxH%l26JG5}&pAk<*xeKq|-O*?N z{CC^shE=~Y3BSucI;DlOpwPDZ3)%NU_VQfQmFM-mFB&X4HBL?#Pf2zF{TRXvtPS`( z@5Dgqqu#TYWo#FYTym$ETUro2T*aPpiTDXt1y(XsRO0=Zgv7bhgtnQIjN~T7WYn?Z 
z#q>$ZXfbMy1Rn~05*eV9u75g~plOsOv)q(y&@H#EP39;M{QTlbbPiV30)u#f(E-xI z!_P%&(6LtXj;w>CvLzHuiv$HfzFeX2rYL&6IXk^UgXCh)r~BSckAfBrb<|x*Ik8SG znH--ZO^8Jp@0VQV06-o6MC&vkxvEO!`}D5raZ0l}b%9xjTh(oqlZ&3;O~^(*k-h!} z3h=~{8km|M3=LLi*p3?wYMcLkRlkPxT)p=rxcgCU*!$omjf>&eS=xVSgxCOhF7YMy z0d73A4N_&FtNYZ_h;J9cNaodv3?4N?JlCdyPAT;S8d}8puN1j|)MR%gv0R9U;2qP> zyGzSp*f6X&N`Qws)dR`$S`(!ZB)(F!Wc6$Cg{nlZ?ddiyP2+m!oJ8fkzP;klekD$a z3*7b3#tfs~3TjsGIJDmhj`lkAjC<8cV)2Aw*_Yz`*7U*)`aa(4Jj3XK+B{phd+bFf zjf_1Um=yA<~(GvqI+ zEKV4;tNz&&=dn8Uu%XbGcav1PUSc}5taP7&(*Lu({+b2Cxznp9MVH}XX_Yy1^K|P} z-O$5&H{$MRB=G00_oAURDl4MWb$kckOK|p`ITz}3b9-N^yD{taP zwd`aBn-z@NDkgR`)vnPR{XS6LgCauED676ROH8Hb-9`czlL{5!!>Y62inuPSVi(u_ zQSGYzg$H@!sCDGWzQyw_4{t4uoJVYwJOHwIB4@EGlH-Flaaj(BhijeGCdX-V<8^f| zQuS)CL0&UyH1nd_)-rjHO8iIUxAsjX<8!R{ zu6=MZsXljy)3*^Zt>knLJ@A$Ya|~imYCAA50^7 z*r?*@0}aR7WYTt<_1G0jftD9J8V7qsm(Wp zrL1)nEw2NzA#&>VQ2Ehp!?gXnQ+LZ(8ry~bEYgbF`pV=WJO#F2f){Fk>&HfdHanx) zmzEPH5`{#oOPUgg6%E^(P25n#q)$|ISophAA|Y=a*%(M>Oa$@dfvezZLwxP`>>FsG zApYHqfgro6amVH|%wO0q_a(a{OC`Gneq0k6y7d<>obNL{HJo=!m6fA)lT~JvhZGWO zei~ap@%_E(f8iz9*`FolmpS9sL((S9Fm|_GClEeS4BBJg+Bm;5%U?hGyPVssF}VJ= ze?x{7o-B5i3#~;eN{f_vO_h$<1X^m}{owo8Xz)b=k?;B-HAs;|9B#2VD~5rt0V=s$ zCwMG2ok0KQS`i$(Htw3*H-uLEVG64?&~>~Ay{=`)=Fx8UdIvTmI*-%- z6l}dPyH?q1)XA7mE5Lg_Y*-IQEO8VQ?e46NAZykbYwKg|<0WY=tA!ohWy zZ|@ zxBX~yz5ND?7AwILnRz*$DHgOfcd@g;R;EM*v6_FNUAOwNKIlc!>Q`5UXm46^FbBV^ zf&C3l7R8G#swb4R#71v3G9jQQ;5}97* zCf3ircs8RwaRw)sQdWR{`Q5e4^wv|UfG$DM#o{?;b}$&M=huQudoYfBDmcSpu<=iCpxPA z+P6kxK1CKD3=R3litD-rO+LjmEVe@#+?j9h^^+ zhr99+e8B$Uml%pqsVMQ>JXEBdy@_p7fBLNnu#{HOaL((ZDi|)A+_naMxUysu8vIK> z3y#Zhteo^Se~;TtSUxo7*-{6cP$)Y>q{$7m?&HS2^C|GCe^GCCbZa&m0%wim%K*;( ziP)a=92=IRV1Madx7w zY>%lc2H#p-cXoR^`*BYap`2aM>e20 zkys&>8B3Sn9J+H2`-vwp647way4NcCw|=#;A-IRa-M2OdSiDmz@oR_DPyQ$vZ_UHA zM)Ek0mEu6vNY!bd1dZU&94hf5$91o7$RV?*9q)8inYD5P2*KlhJ^tWA)Jojg??a*N z(0bkiX0iZofx_Mo9=scJX183b(dYT5(j6xVF3*o`B)H|wZpVMeilk9u^pf$zH~uC? 
zH(Q3EPdV8NU0sj`)F`1=;c;lt#~GtHCj}4e)M?2xzcaiIi%nG?+z5Fw*HL7!8Jy~8 zRcA1FbRPaDI$DI;AxX@-@}AtACJdEcf_|!uER|2axD_8nsaszi9KPW$e5G0|gi2r; z3hZS(;wNLP4-H}q?3WtwxO`1M=IE4?;lc^cET|LuOn`?jkqom^ubQo62X3M^S(3z!`(RVy z%vRaI+sg7jQ4kiT3Wt*QFG0?^t-`+$3R)u^vGdqBR_0I~OQ7{%Z0_N)5ZgK6rP`!G zSUjTWdsEg@W*>+dlk}=?MCv%N*YDVov=zu35V#`n7Y1jV6YqHGF41-riFZYT^zw$C ze5LQg_VINSm-hv~-1&Vd|F%c+T?W;upU(3WT7+9vO2V~Ibb6umH4${rU}rZc8Cc~< zyxp`fMfw$b_^Ic;jaK^RhV`8^9bIXt=B?r>uc%7A{m+=mcLoj^oLUXoM&8AFG7gbq z0TUjzi1}@B*E4i0`Zfv~$oI;U?>L3(jNFWQ83%S@Ffi#Dwj{0$?QaqWN>5$6_mfF=It3p9KyuQ z5r{i#;U@3F;!~^MWzzcZ=mJ;jQWPG(l^}`YN%ZD|Wl9fMPG$}jbiVzqV2yE_?KzcI zn1ii3G<2?v=Pn{v^jKmUY+DW#zZ?3#v9Z(3Npf-~Vid3qpV-y1*J=Eq4!ly#6~?P% z?~clei#f45{?KanCPaFV5~IGcH~HSzR%~^N>D`BcC{Ywv)bN$~-AZqo(uF;=h#iqZ z1d+@DgtHE#tKEL+R2g^i=7r0ArY?IxO|~z_t>0v{Iv{#x>qRMVFG}{x+ZtFGkhnbb z9yFW6I6K_Dr1I3w;8CrjLRW((#5+#Z*5*|%ywDN7kI3`MKH=&=$g%dZ*qZw z^F_D!TEGX>;f$q#8aw}Jq9FCQ zGzvxI&{Igk=pFoRg5a>(MTD~D#WDe7eiSzAMh#4fpK+iZ1)UJnV~94P#QC}-O9+?k zj$I=%6x!=sPtwuMwnq9B(8(RZwjDba7gmh&AN!V2^UyvA{#VGE3Hh9yLwY=3*b6JG z-x9BQ#pgusnoeS+Kxehcgojty5+|l1$XeLV%|o_HH1S@tf()Bq%7rVpH1xUXf<%BB zE*e*pjit5q99k=#o(o-DHdW3+fdB26W*fCN3?kQ8?VjF=u6d#ouY`D*i&!G=1tA5) zNTp}r0A*+>;{@R;V$~|b0v`|Cje($*RgG7F64_ng80_t;tOmsZ=zW>7WuXW9MfB?U!p#B+(2 z{IG?+r9$5y@_1T<><3o=<>$;Qe zr*GKwtp83b^%BkQMoSHCHu==px7BNj_r+r(jP<0zdUS_U{3n+`rTCH#*us&Ip6jUG z)6>zx9GYcH4bZfANe+G#)LkAV2EbyaxByw294ke^I)~PQud(UEtHr3%j5T#HQbYYp zc~aPI+qB{_#9gefD5gEfUrlwVyjR6bq*(je`VvBs8-`Ku4u<**(|4C#1VA-C`PBBm zMC0~$R+y6P7gdcUTLRY--T}45o$B;`OI$e}YF!L={6M(w3oye5w$xGV;B~<=)1q*o z*)DQtun@Pe>wn$to_tLUX>`QLK!#~$-l!Q+2lp*udRcA;*? 
z#A?$Q&xxv@p7B3u(lKSDt*R;KJu$nrh@u#N@G}*)61+aJ!_z$cU2>=J1i#^zr!f84 zkDBF4VSSgcv%8_&m{P;f)2D~Zolo3(SW)bfqIPM0c)-z)(CCyR9(9o_tmnUgWnf2>bzfe z9{LR){`y|YIMS)spF2L${oLr~?pfc~-DX>>kJ-+Lo>F$Er7$^vJd;Ekv0p!gps|1?D+)O-k~jQiMIq+F&t}os>rHiLxZ)> zqLYM>^jigeRKLY*bpXy2fLUjiS#?JvZlK5V^>slxFCTm-Nq?D0fR_j31xr} zcLEiu;~h^pF(`xJdG(7YZ@$pM7e-2|L@6-yEBjTfg;Ir$GKazBeD(BmRV{VDh-HxB z415>V(v4z8o3Gby` zDVav7=tAcGe^6hy6vWis6cS5_O+~7|ym9MSRBoSvMnS=NliEin{%K*!3Jm$~1CBVkq{FIUfC0@dmEL{IYIoS5939 zVv9WWh53(mPL|B`yW}V{XK&}pX0yaH#(3)?=LdII3RL1c^@PUF;&HL7HaDn!X&~hg zTFc<8)37nZwbLu4B|6p#v<^-ZEdo1Kwx7EX_oNyIq5B=`oIxD+&*I0w=RnqH!mu-O z197E=%pbW;w7*PE1wH?G-u0aL2T7sfzkZDF6$HNcg6Q~DTV3Xw-=&~G9h z|GQ1Ic{>w(U~&@c$L8l9StH?Mpq9VxTeF-!S2I%#{|l__S8o^IfGMLw9uX} zG%3VkU=<3E5^Jd$@kuY#`$)!c8)_P5BkOq<`M%SW>x}($XRh~zm(GOC6)#!IvR=~_ zYr@dOc(tDAu;D1BU^REAm%)LizkefDat#kvy#3>0!zII?lRMlQ!U||T^TFKHTE%QR z=*;Oi_Kql(R_)5hRHPXel>#dwgBjK$5jJETAa^B>b0Z_N%(@@>!ZmObdvF|f@h0Y| zOFPo)^^a>_;{UCVMDu`bS&Eh}OP8<>;=8TzKs==tej{?ebxXTDwm|*X8oJ19fmYSw zl}kGVceTzyUdl!y^L7R2+A1XeyX`@nIBF0!d4a)MUXL8uqELZ~KqD{stCHfmvKo7s zg%FEqkp&BGP6ai}u%M@KvNzf~%CD)b6NU-#3d;qq^wOIR^Wr)d__@*q9()qBC5;er zC_<(0L#w7+u_OdQVIkOL7Z7CIT z(Vq^9S%Is>KKkPV|A`^Xgg^t_Th=8^a|DMDgrnbD6hC`tx2(0k+qYbj+OE6F!V`!P zg1!CJ^?$b+;DnSxK7go~#t*Qv`t(Lf*3qIJcv%IX)0uL!;-)nrsW4Zavq+1gf18wE zi_P(d=g9>yaR9^ykJ`&0YClXd==j36Cm5d?AL zVXz>JpNyU9D?rU88#-4@H^C0fh8!~`hrek(S^V8N`r`xLN4Cl~BxVj@5>0FA2E^A^ zjWsTqG32{fQ4Rhqvuo+>7st|r1h4&6VfL7YWyIN>p*}~w?fNfvacIuP&F7BD&+*=@ zkBTeYP0~fKp*5#+{2F6;p8y<}YxxsxrZ$(kSMy@~)NyJn%}Y<3hbHKF@;HJh5$X$> zMN5iND3%-(JpUMDmUg|0R{ig`D(wC7McBH~&l7x#&v6=_+@vj4N>F+0Z}jmUwd#7vy~g3PU#gp%Sp*24cx{WAY+@C3IYY*|QBftflWs|3{Y{~;2i-f8*384e z+CBh^deswmRqc7(t2VPf5w6sH>d%C$2*)n>5XRK6H9U(`L45R-bFT6UooqxFH}@J& z&1SEqiQCDpzW``~SQ_gjO_M!_E$XcwMYk0K(Fb7-7KJ*7pFKTVmu(1cQT%nX6-REF zj${&9BFcI3OYUvIFuazb+FTh^*JKxLOvTC%g1LEjGh!{xi&% z1+$m6m`Tit;GYY$Dea_X!^GBe0&dxOr|*YjX=u|6dGpM-P}rF04cXx{xmd$b~ffOss`WDY*;Xhsh688d8mHVM}Uv(nnki_!uSywl|s(%AD 
zj|u+L^DNB`vL=Vw{J$B@uMF@771TYecSK(`+d4R-opFW1MchK3d`<0DR;n(is6C%^ zeqaqb1Qzv@{pqD~9iEP`efrV3mM{0P*CeKKc&aiQ5aX@H;v|%yr&!D}R zStnKIbgjpl~Bx9i6w_Q&v|87au^MB9)_728^+Fkug~xIpU1<)!?kPI^?JWvujk2!mL`xE zm&uugM4{yKVQLosoMT+S1;}WW+nJDyZG;vFEb8RxSJkM!eNlhWi2_Jj@EuHEz+S< zVH3@}JlO4IZK7lCm3x2dr#%wJJVGEPPy4F+gY55I^VnrP9hPn)e9cTls9iSbRd5`r z5k=`VldC6@#h7}shK9;^RX`~>;qMF4BNn!K?O|hr+Bezhd)S>Ot+n?O_NHB^RgBhB zjJ-JAO%<-2OAQ$ox`n*GBmOrP%0^Fp-Fi?erdXS_46J8$CVuIdy=_wg4z;nEm*{WM zS*}>|pAI=rwFw{n)aUSvhD?+&jKw6^lfSluJN`w zGWJ@(C(f)D@SlOyN=I|YyE|_gKNGM20KXHLSd|-u3g7wmRS#?b96w#hYZ{<6(6a1` z{}q;U4WN~{_@KjO>~(Rg&-NLKMi=Iy)Kn>bH|MjlYDVsn95){M1+OFA0*&@2Yp63Z8H)dPxJ1@VMdta16MYlw6NRfBfN7;V^c=x(Aj7Ae_oGDZAOaR~{A4z}^ zr2FpH=cf+}*tS~DZ+r_S%iuXAdV--oc#X!DW(_?(dN63cx!eqpC>aUUjj39&(}`OH zH)#YL0s~M(joX!}{%v&jGXZBm=Vza#5;pwxM##KV7xlNyAWYT4KWF`><_ZDK;EOQb zzmDe>f6CnY94RIT0e?M=iu?z|YnDRi;NY_8@GmAYtoN~F03@Pt+40MI42H^Eig1Dh z>n@e*CNX_nER9$-KMweYx~{GQJjRSLKsEzroNf)QZtNwkRA&UjEdV1pM1#S|o%0%7 z1KigbF3F0yat-xpcGp9z+4iAc33OaFr})}E@(UE~ev z-dLJ+5VP+ z;B2&bhHWnjcwequF9D=4nN^LA)@2P@%V=>~#}o2L;&9eet$Ywb6RyH`^jiGGxiS)D zu;)_?XG?4um^ii#IILk^zl^yX<}HQ>0;1@NIJgP061p(SH&(mlOiRG*)9z>~w&kx6 ziJ@sv$1we8zHRQ+Pg$rTBpZ{6FXcMAJz?(6+*|)ilKsfyDB9Mbq?#peg#7ztQ`Yj= z-$06+-R7ZyL7=!MR-8dUAWryTxN~1-9;cOOA5YLusQNyuZ)LXbLYAKh zM=Qd8WaC4kP2J7`>{8=y`g>1KLu4g%l(zb?pspS#+}TFM4-=-c0P6KoGWoA{tF=|{ z<|iH^*DbLKQ#rjw60mZ4y0S3;sNgA}z$?LO@xVFXEDTo>he(Vj|N6Fc1rwMfdC))y z25+}~U?Z9(yFUD7SFDWaF(+@v zAk0aWbbrn2CzXKSUC4JgD2{2hQF#V^UF>#}#zLawO?%kJDO8XOp zO@4AOtbzXS?^Un4)gioY?Xyqxq*4|z!{<34B6JruBCu)Faiq#D-(I8>YtL1GuL%s~ z#(Ox!N#EoR6pnzFU$Z-1IHB?;)U`h{mDdk42~m$sUYUQ6AR!uXlLWpNwBre#gCu6e zJ3Ss|HeV}NQ1T0o9`Tjge*$yeW@hBk`e2IqPqlT*zwl^AMmJyzx|ld{_{CWcBfeJu z?z@L}9b7kfTCdKykTh5kLY#M0j5$rk6TM+5GoZpfX>`V<@Z(+W`p!OjG9>AflZy<; z@tPlG0?et3#|d>JTJhqP0>6Q)`R&(e*ECHHh9OdXf6Fj0mwkGgTZkC}kd8%o$T-!U z=!fIybRtePagnJJd8-Hbn1`AVsI_I5Qt+yQe1jT6U4K~*df{QmLf1af#s-)?i~O(( zy$=gKtd3TQN!y3K4wgI+;g68y;;>{(xdgz!4K}MMw?DzLD9^fxu6R~!aui?A`rG&0 
zhM%`e7Bm2&2nCWS#ivZz>MExi>Jt8n?2TaY0eMuq5gSU8{T|N`!HSP?eB$Use_r%8 z-k&&zLIHvx2xv@*hi;~ZEbB9&edIRDv2J^=dEA9OiTo6ceZ`d^6w}kM{t!#0nA!BW zBZGNj3Q@O=q|nW)dy~nERCAR$1V%2id}jhzX(4nU-JU zQ^Ld<4driYwlnEhyT0+Hv7PxBzziydn#mG|Sq+|P?eUtXpNtU#4#pwFng4JNP#|LOLVgULmc_b)XD!VV)>KSR`XC&ZqEn$(_jT{*PxkU;$jn7L~l93Yt>~90G~&0B;pGQ5B2MQCG;ztDC2|3z>kabTI$nY~J71B6m)H!s}A! z@$~bn8~p;=XK?))JC}Oh)jHq%%48oFNi{W=#fN{eUUzLL*a*OVy5fVL^plVQ z*!c8OY^17j`YPYDS^cw@D?!L8T*DdC4QB9OXOp0(y;XqjoNsd?Su86Kr(+;m~GBf5cn@139 zN*{2g)MB(2b8!51B$j^zyrDoo){5`D0F&`Taaw(VuX-nN(k3lR z6#}3$O5e+zd+TLICY|ag75?wm2y1ejR=sSE@|GVvI5yj_tS8U9K0_XwdY^h*Lu7#t zLoxo1II3AM`lj=qN2 zgrDqK2eo3)dqjg5?Z1RYax&Niss6ZJweQ$SQ5jqpUpSn7Db|jjAJZxQbFleZIZJ7* zsPM}~YIoT!F@`PIdzIH}CRim?f3?1nWCJoGP%5ZaDdTV(Xn#$zbz#;}JULbDgM7RI z^K5faRvF~MZxkEOhi|K|(~C>nj~oHK@cHooUxs(7_t+HU1|K;S`CBG6kzUMJ7`;?x z%GR*@brC!pQhp%#l8l8aN@qV3irFDOAie^a@LTK7klV=n#`!VCG2fm2bdiN)vu3`% zt8-emIe`L;opLc{v$%(!C4-#rC zzIM%A@YPIhFHGFz`(MN*InL+^{0z^=#rFcK8l8y63Bqe}k; zG^muIs@3qbpZPI22~Fu&>Kwgt zPKGRoy#cEeK?zSKZwGGw^|o_IBKAX<5sNKQu~&n!8=JRBoJeL z$&Ufe0LhhVhii$(Z}|+hVFuH}NoIarRHh>4v8lD?L2#KfD+HF$KGn;OcJA&DTl0@oYkv$5z?t>VA0?_xuVCLgC$6mdB6(zaDWOatI1wQ3QyprxR{kzF4J#A+% zBg^m_8L??|5rJ@6Qx4G8mj<Xxi= zY8kt(5l$MPt~96^IR=A0c=jn<@zlVn$BzeuYU+vQu8aNfH-k40MWehFx;3_&e-a_X$0Dg7DVIk4 zhn3thZ%8g}y>mIU-LzWi-RWNeox^uY?enAFptF5?BgWQ{p#6#Mg)t>%0ae4jlYu$b z$IZd~KW8UBXYlFbvp}}*WUoAKW4aA(%;=;iPGuF!=$?uCGe>gN*E_+t3OukbeRo?dA(iW~ zp#y+`J?9~&-A$3JKzxGpXlmkg7E21SP70j(B?M4zOIQ*XJ*z4y?Xiju6fpA`hBwgs3o z+-Ls_U)73MFQwehz8ixSW46y|_&{4y#%RUFlK=wQ_ngEnO6BHWp*|TKPyX3bh zkrV}3*tA4LK1i2wtCtV5R{JbeMH$4gkM${k6`W8fr8hIoL=w0_5f&SFuC2qv*S(^z zYK$$#Z8(8)e#-=UE1v3tC?-cro76SGj0gg6C;1re&nOBbE3Z;O0Qb^ID))f%RW}ie z*PbHuIAD}a)!b(xVBc>VxBFg~q2m#2JHHBwU1Krg{}(fCKzx)q#)p%rbEA5j+1wb zA)JKbX*lBOpzN4W4VaVSr+FK^F3W|{A*D;Hr`Z2AK$48fx+1L=B$Z(c}m zwN4C%*PkS1kyOYO+y=(+B71a)o=FV2X35y3?hJ+!F z?`qa}{ewA|*7Fu%*~vzkumQ&VXObw^;mhc93cv?eZ_ z9!7abeD#>gC@q=Mjs1)0KAdVZOo=Sx{SxLnMJAUbPKeJ8A=LSpdnwvs-}<4joS6q~ 
zai#IoI_Bxsqp%Z8JI>-|BRA{*{ftu7{IIKl#y}4K|82BzVDd4p!c#l*aQ`l#)V#;B&qH=_vl30@i12UhsYn(o64$@ zxPBNlsf2OlVv4~j&E&!aVRL0@#{z8PK4&xr7TY90pVr}#eJnvq?!38c0&)Jgj6>VZ zqI)s2+O;m)aJf}whn}HoU6U|9^8Kv-Q2gCwJduU9kVu{yQrYvRX%PY8gA9g}3S@i%eZ zE5=n55w;~mFN5gNpQ2F~v$l5eVMehxoT$9rtinjhc>SG~?rS|5(msB5ZQqc7^42P?Qlm}CEYWu(>-odN8ZUm*VElV?Z7+K(e8Is38&1XN|C z$n1kk=Pv!%=nMKN3A_iN$bDaTMJ{{V*ECQcl+rfMfw`P?(^UMQNL_A!eA`TLSF!0W zjYG4a8jJFLpAP#$bbnMWJA|M-IU}n0ZSk;Xx6?Mj?IpTT;fbes3ADd6hr6F;tjm2W z$k-wf$#f}_Ty_V_)2kG}RJm^Fl3G4<^T_J|cb3{|nT!1{K|((!&mNvz%-P>B*vvQE z9b~nk%20pLggz1<82H%Chx-(}AwSE-3w!q zK|bE_xNn%tYbPLohsLv?8)p#rzGIz#*31l}!HA3n5lP;ox-MMTqU zSpeY>YmB8|xMkroEib8tb|KRh#%g1FZG5XYQ*kwu3Ph?{2ns5nw%OGljQ&d{TSq!> z)*Vjla&rV0H~+M6uuh%C$>`Zg7< z)Kt6H0-`z4!CLTkx4YVb?H6dylI$g#z!Ssm3_%YG02a{BBQ2@_aCM0-*HsW=_4DMx z%Wrm>q-`I1rx8bagOAZT{|=;lW$ z5?q^A-X4n%e~eL~#fS%2*`KmMxfYjXd9tUJ_9;3_o7`*aPzK^BHUOll63Tg`0eU{0 zJt!7(;SI0DJ90`JA1AXWM&hVyp_7%=T4(-4`V)kjUvy#;&APsI_)iWa=@MB!TA8KX%xBBTF-)LjBkx$P$UtPOr6a4Ir`M+F7+v1A39ABR_=CEo^yqPJww~xE| zVCZ2O#;Oh;A`}OOqVLy#Bg?kxlLUQH3>I`E_DbHPb=1x${{WF5z@cCNmI8&e&xC~9 z26E?PMvI2V8l>U;d}36B1$ssg_S22X8g+Y+>#k=h^$31fdirSZm2qGD?H>ET z?`{@YS+V3I;%&a8PlMvUrA?FFv7;In(XjKkklV=6V)ngtorwOJ>bnxIO+hu0117pgkpH^ykksKbcTCWLE{U#6uPnz-==pfosw6<82c zoVS=D9J&+?0Ifjk=a*@OL569plwQ5WJ2z;*Oc4=get6YN5GY9uQvcVdH4FU0Q;Fi< zP*)@_D?QoW_b2o}RcJ5*^uul;20@TKeN!@cn0?B|oB?HV&p)u!&Eq3^)hMa@;F5m` zV%-c$4xxnuQrGxfL;~w}eFpjJN(FWcnE+GoQPJ^%=cjuQ;wcy0}3{5^0E!|`&Yy$`MqsSXp9 zJ(*6HWXY z6=jUU@)YOqJ5(W%A9;?-Yu%KOJnXmCF-!+$yZLU!FlJj#(0+c56NkVKnMTUMv^sK( zUk}{`ZYN>JjlZI+{6vV2Pq-C<%X`=#Jo=B#@}cdJZehgaN>0pXi_Y5YuR!X7!#?cs zjm@vc{SxGdV)`l{>GZSELf2O%YD$QuJO}`gTZqY;*x$jy?%KY_3syf%IEuPkD{h!bGz{^ayY;?FU?Im%R9R?nNWhnc2(D zJkd=86%4iKL-I*y#e~D;buqFH2YG3K_9tp$>G4B*g2eIy!K|Wt-Fy>sFyzQ*!c5F1 zyyT@GR8-3=7}7>QSyjCW$~4CmOg+lrvHJ8`EQ;!@Fm^A1j`>0l{2ij17(Sp0GPRe$ z^*0!Wu+zm&J*)GN;1*CS=%qdWg1~2sIL`YLdy1%7yiWeJ?Ury1{1Ey)D|`HsRkIb{ z-~6vEWgk9J%{LX}fls%7cEb;FH2?F3SgKBuj{>(2cIuI@+6EzAHp_a!n^}iI8@Rn> 
z-t$FzYmCna(yZ3J%C?+h22CS7l%jgN{`c2FUQ%E?lE)gW zk@(!Emd!K`@Jvib!nPhYvd7nia5Jqna&My4IZ})ij<6+$<#856xJ%AI0>vi9wuO_^ z)6NWuE+NnuuzfK0nOnEr7y6SM)*WJ4y3=bsr5#r`V-Q_5SqKC%wKQju8@j~a9k*)F z8FfP9aTe0&6@=ybQeDZJLA+s)XG1 zzspP_niVP)eeE?*p3+=-sN)+)zJP@OtaO`7pKL7wJc)oS5Dg#s^0=p&Cq`f-K~${b zumRE|VPZn~J5E~g=11J}o* z22Iy5&)2Wv2!j)dL{>TjYs*=eizyELu;xKtW3U9F$U@Jlr@1+a9fD#g# zh;xC)WwG z>&wQ}f1&*64}-j026c*B2Ltt`nZdSvD%u5_^!XsLx-Mt1U4c@}=Q2CINxG}%7^{8o zXbeLDS&p*ZGavN;9-`4`!9xeZit|hDx z=C|@ITALeRMO8(N4OByNx$Zn?O!67-i!eJ)9At`Atv6U-Udevd|`M}oU(q>M2RXeiSs zW!?W{X8x;{#3i>uk4ec_ggTXxQJ3*EFX7?8W!Oz3fQqZ0o7jV_*2gqA%P44SpwiSk zhu5@{+?xU7>y;goPMPG&AAJf7`n|C$#)eLB7Z;*z^!KNChd_jNx_ehztK%t$6P`s9 z4Ni{Vqh^mi#jVbGreM$2nL_9#pZwvfMN;gs ztw*QJcU>Eb)$NN2&`rN~Dq;D}{Wxgv$u~)ut`iBeIGZ_CIs<*dIOM@h16MYISEfmF z+cxaY!L@_-xb24!k4bZ_vqm+F-bDeCt&XPaOR%{VaX|-iPkm6Q)f?q~n+JdLOSZ*3 zB9!RLx|^kkwsOCF|2(Iw|D#f$a=5h8@l2OT#T&}ueK&z9NU=rTcZBeNdZ7+?s4GN- zlic)`0<>|x=tWF3GQdIUXyoM_#1ZPLH)_Wn)kEq5YjvNbJ$?BxUCU97wC6@$WIaG6 zX1($-Ck;p@#H-XBc2=;z}{Dkw#xG$l<63ZVL0d=YZ0s(!Jf$Nr|d-cT#gRQuuI!u z3OObbU^{2Mh3}Nw(;L)qUY%q(acsEPtmqHSilih2Qpd8`8fp1@U40^BCG&qK)cdH% zvA&gEUv3_GfvMR}vSXtVZemYatY>&Vy~oUWQ-&+9D<=<-7XS zqHB!0Yd|-ameH4%{i^3;r~-qASzpq3%PCim0H5~E}eMwaagvHA{L?8xVcRysy9G3tMFNv+owoN)$n6Qgfy89Q@_)mWp?b zf4)2>r=V)RVfV(j^g-j;ctb~AMA)C!UKR~RNm^l@kA^c zKHZ+E#y1o6wJ4lv zed^Ek7O;@U*jR;*zK>1tXnml5cK$1s6#=k)U6PBE&(KK z4Kye8t~dG7Jf8fCWMfX~#^dJQ;upkmz#(ZQiBaGvlBOIjB_A&hAd4Am8&crJATCx3 z-=uwAZ+hq&T~xzk_j9@T>D;y);Ea%&UJVz(-XSsm2ykMVm!R_2i`|H()@C}exG^%A zd%qm-RuD^K#X<@GcC*T)-3NHWfBp#;11ELUxE ziQd(lIBEtntCCSgUqCu76mk|S-Ou%E125Ym-<*VpuAKuN<&XJ>hkZa8k!;(M=uw<{ zRdHH}yZJ}n(~Z`u)|yp>i*aPYQ=x7r$!FLInGE4bOFherFV%(J)D<^7BcKd|vY7j@ z|LGe?-NQ%{NoYX*AAa~-#+g$pXM&p)0^IQ+BEg1dvu1x`%|q2^oT}E>+wY ze5K@@+-zzwhdvkQc#iyJ-7Qit^9Z=U6mW07Pjd^*P;UXgAN8lllON|ghL79wsA>YS znVt}N9gvk1@^tBd2w|AHWIN*qgf4EgJe>v10y)Wm%|;FTs*g4P8z9?QP_W`H?xv?X z@JoRRAELzj+5IQr68ASlvmv@(*=xS8wMD58<-SfePe|wpFqp+T53_Hh1!f;m7~Obc 
z)KJgdBv_>bdkn)J+?AtKXN`5jS-Uk;t?@uyPAy);P=z5DDnzgQLfk7px5OC^?VN+^o?%&Co-xUB@ld*Vz#8_isRBrfXfdJ|fWA zEac;(xS6wzq~?34>yLRUK}Q=;;cP3JLffDZ%0I}a8@T*yg4<%Gon^?Gta*&=6@t8y%p}s3#}+KKDx`+Aj$gd*UZ-}FwpBU~rG6C2CN@}A z2+Uj_2hlx~O%>q9P7|zZ*W(#g02O&GfUrTbLBHd%-M$BXwVpe4dJT@0z^jar#%oRR zIwk$lPB49HaRUf-tqa?re<F2$-vGJa^Q`_ZzJ9AN39`=v~?-m&|qe^B7AA!pUKOjz~3?@ z8<25l%$=NtLs%V{y2N)DJV|_f7C1QE;4N@#M1mB!z16KP+$M*$vbmI>>Vzj~KrlrI zzgzZOht3SeGm?0{#y`*emWePM)XV@~6cCH(H6(m`;8prz16d)qRF^Ob- zQ1LG;YF65m?wD*e>+z|kaiG?V26>DPYwrqcX2`vrsX$rhmW~^KE=%rk=ZyGYjGokS zG02=0XO)vrAJY$-JLB0ONEgUs&}JoAidb*+?b?_r16@x?Tg>pOK58$_LQn`KgPF3j z(R6LT?Y1hq8L%eB{8&f(rCylb+mD60nlQ^tzOFC2`F0q5w3uK|0>_$0XJG73#jn$U z>rt4m#lkf>Sobv78oTLA%9=l5dn5e=gY&J^aCIUCQ&S!g%QOiC3gb6%i!8;Ix_=g& zn0;(&wRENYKA~Xi1i=3fvTEXhS=!aF9!1ddxIRMSNvcJHkrUv zjDy{ouJ!pj-ndE#Fhlq(HMsfOUjFXM$Z`(^I`Hb#8^gV>i)CZou0xF&`-hHY`L7dh z&+gGZ{C(uP{BE-Bjt$k_+iaLd+|JVk6V&wu$qvTk<#bIL`sv1?;*~?G&{{tR^bO` zL_SHT3SXhVbyTRI20QvpXL#Q>!}*MI6LjJh^kZL4`-ItSt6%25wprG6wM*qp*n9!e zy)eGL*#?r@)a+mBGUB&VG4ge@(T3LBdSX8O+~bDAeQeR*5mg(&S_pNw(XpiTqN4*I zhSD5XnB6D4S&p_9+guu+X-pITVICMG zot2J5Irr5}+7Y{JIkw-*9Gn9*JB+pCNh!@Yw)vSoh1IIQv>-Z#blR%)HGWvz{f|u~ z(&!@e9YfthT-&fPt}d}v$FjN@g}&b-Hb4EfJ;?~LpfnQF!8as&G>JSNx{I%oZdek} zrzC{}j(+Ax9JAsUsQZ%_+DjdjM0XXe?7w>&RXUpcb$F-=ob2#(59K5G8JIfABiUr= zSBXym;b$N1aztcq?P^l_{z>svP^R?5+;5pr;V0BX2LFg!-i;f;j3A|ZM8Mhr_rUfb z>k35uFyG+Q64Zwp>!6*(mTC}o94_SFk2KOSKcOK<*_qJ|6z}imgz0=CnXQQo;DTmf zmaO|ZbRN@}UUoXco8J%Fs*2zf2Z{G|GbC4PSK$e~-{uex%4%j=w5ai3w3??SWm z_!*o6bNHBB@@L<=*ta);(YwmxNg=i9G6^%XLKXc;fAWz#uW=S~kP1DV@o{K)>IZ}E_;PrAnU_0 zN&0Aw9=)j6p!5#!hF0H{Y%QhKj#hsg&BMvjbsIKaUr5#$SPBUlB6GD<<7xIE6aV?> zy@G?A=w6BRS^KqlyCtm71i4`q)k1++}7vDkijKv$2(8U%O! zxdJ|nkz_Zp@=f7b@_Gm>9g`Dmj#?+#L$%`LFSG5~kuhd&*-eD<7O;-0_PNNA;(NnR zX5%z#W1YObLr)JeJ;`YcwvWL#R=dpK@k*^DJ445v??jj)PXj}m21iztz{oH6v-90- zpOP}rD4#eXQph@PITT(7*H4o)w*Q6x(d07Tjbd*Ofi9wQUvpRn8|2i`)iJHXpP+gIW<8$(jqufda|7?4}~? 
zL#-%UVU3#ZSrBb@0I|p|D9Lx(xuy$>F#`bWP zvs4OY3d0k8V@Ep`+?p{~Y=$P*ic9&*g}(Un*Nk)b!9j#{`-TchunD}DV)Kdw*Xyi* zUR}66M*^B>5CdyKu`#EX*zP0g-A3+HeFbk+K!o}ws=aq8d~bLiw-U_CJ5u%;g>LdM zT`3z`)8EHBXlq<-^V09vw;8miVuGcW?XW95$Xa(oz!|H^^Ht}Q>&yu=EQpT(zRGn< z(QP0`iKeR1*>asWvx$R;h1$&|OsRAbRy%F4p&o6Kx39Atoyc(!<2&AVy64GC9n}FJ zv2~Ir-f4rL_h{)sp^ftL#S$Q-D}XWIu=;D(1%EK9@RqVzx3210PzxIGEWEO2qOi?( z3o?ex3LtNqeXC=Sgh}==P#cPTV6xhY=4`z$Ly*^!e#?ZxKf~63&4mL;M$sb8Ca?N= z7^a=diX^_gw{>fH?ANA|hsgW>o(h>CC(;|fGbX|)_)ahf0#4kNUX);Bwdp_R)^O&^ zu!T0u)btQZ;_QSkU<^_#d7Z{1ss0E3z?aJdx*yu@z+LSQ#nSGKj)jMAN(UNe^(RvG zyMwz7Vj8?3aG1{bxp!Qz^bo*3C7u_h+HHMPWAQP~T5UX7vwjL>bo}Tl4xTZ8dg@N8 z&Mx=0Q}~81JR5^$qU*{2o*T~6Jw(w>0OhRFeZhmi;mCqf5LV2#Tazd+ym?5{|qi0$eG=xO2L=bbjmC!YYYq&2*BKw zOR0zI&;u3wE}kEI12f&yY6pT>1AEU~IoY}%TJSB|T+C>A@MOX8?#THE=O08Xr)ji# za9Z75?eUWoFJzG?-|`lh@Zu6dag4k#3<-#5e4WW?9mI?DW)O#DK#f)9#|2r}8}?SJNP7qS zLWujK{|$|yP6$Cn%OS4S<}G3y@FcZ&>l$UMQj1kcFvd#%b3`bKn#C9j+k|aT2 zqlf#@?GSBU39!74cFbcwPHFR~zy*>c!}y1js-8MpK~#bxkI*rdKSP2Pv(~jF&T_Ff znsO&s{hSamcR<$S+rT1%YG0XphclV6;{69*bbXk<{9>B-V~$~FW*sqRjw|+U zJs#jp%4A0W8UO6i7P1MiE==MvUm2O*QBfTAH4cCF%-5H(XZ|_u2g=9vVE^_3n2=$5 zKR5nm);ubks&_3U=r83We0{eko>TEdBTcGGeX#G68`N^Av)uA!;jk79>a+nQLjq(= zi67u%y=fsGk`D1fy^d+L{ifN422=-pb{=0CWF=zQ+E@R*;%MnANAQc8cHmScHXa}c1{a^ky{?X7UDO9&7znY?ZU8J2~xB8`}+5}QSW0p)Zqv5 z9Si?6WKc}#@W=xAr0va6$xb-%FrxTz3DBYc;_2Vuj+8ol|;RB0<>>J zn|l^F!FG`k2t^^pnPRY`(;`V;{d#;xlc2dhSR?t0wo9s`W|E`;u<` z`*nEg#h#OuPq$d9>8A)#jqZqm7?ImDV!7kLWj!ej_ht8r3$<@k1Bw zoSyKtBc5C=L^w}Q1_W-c^w~Zbq-S|1S!CFaZ%4%->KVm2&Ec|wS1ak}n59$X9iFcT zL&gC%9Q);G$tPW*Pdb1U+r$W|iXom)bauborWcCD;A4S^*IqcnF0~->8P<9-aEOiT z4T3x1t%abdH}o&=UPDKKBc%k=C;*T-vuO1oEbQq&CtDle?y)@}UoR8|8>10fy(~yh z<!1wywiIz`qptvwTBTE}m*Mvn~DRV^bJZN`E4HYWa5-Vf4*nbL04pwCbUgs5TkR z;`KiLxE15LCH)E2q5NAav+n0CF{bKsjP!R*nt22Hu28mZN}dnZd3_}MV`HK{5ROn( z{}-hmpfkc{=ZvE)k^?sQ1`Ym~?syyD0oT|6STvFeH6%g9##+umh=Wcd5jRoDVBP8fgwWZmWb6c9%QkuvJ2iBRL;dm?dQ~EXSE) zF^3JC*{R?A{{DW~_4~sguFG88XP?jeb$Y%YPjuF0py;>%>(h 
z{g(G4;Pz&XO{gv_)Lb_k|AuabP7YQ+hDYs%o6FT}XE6(9t(T?-;g$bAIm@40l~aZJa_rbYA&Fmj_7s zs)j@vKas5G>*U&+IXCRNtKhK%@6yjrCec$c!X!ZN;Dk;=a|2S?+miTYGZH))DEXA`fI(Y4XOao%|XpdjJNucG$ zmP}-_K;tvdG36WUmgS_gL!XI!gzOflbUr4l z9ea${^|*HEpupSC96FuqC#S2u&zv%YxGl7yISd%abDDd8^R3q=)UJL23H$fF=|CE; z7p>mmL3z~jr+!5Czh28t)4lqqmr!{M|7_nv(l#faelN6VnHL@BbtyUPI%%-+4w5(? zP%utUEj-zr<7-jI(^5Z5mckXvx$|!S5)FiTrkRx^-nVED?5Kxcft?!GnT{R)ZIFP* zCC_APwHr_X7@3zFs7goWi-6C9?0%%=J?wgmn&Ex@Yb%v}YXb~{$14r;!;|1_!cZGu z`ZQjRyB3}0_Aff2)5E4(N{~GfR2ZznSui$Vgf*>}X!0KG)r+kR^EQ{hwT3;~X7K|! zI?U@zluLWMCbkbst`v30sS0jUh}5X*F?7w+99m9n-gOciu7Xvv zFAwGn7@k91(sB#{1%tzX;EMEy5dM+yPA6@8uI*?tlaL&VdpEak<`Mch-k*t9x-96$ zl_N>g+l|D+c-1K|wnqGU)AgJ%pKEuarV2Edb!B@)?;8d3VWZj0G#DuEV9Uc!0za_+ z<}bN606oaW?G;8wa1|oLvjJ(0nBT;_J|b7 zZ-%DhW}T2-HoL~gM}x2)Rp?42Y;zV{vxrg{@Q>zEx0!0_Z(A9r6-r$?Gi95y z*4<(jR}$^#-OmSVp@a^b1G1ih_aRP~2I67mL8A1mG5|V-xb^+qG;P?Wq`ARCbOD`U zjBizjV~tV3^-|Or{)6Hhh*U0>8?(U{Y?0Q;PR9Qs#J!YkA<-i}NlELn!!30K65W!q z+-A)!KHV}pR{p{HzTOjxv~=U{4cxDwgxD;64wiL<+?zhCq!wBPb=QT8TbB3V>GUow zTfXAFy?1HcnDR;^BVl6*_0b|7M%ExD`Z>ubyXBS4W<#B3Mdj~4b?ZK!xhRZ5DEX_j zMG^4OhY^aB+QjVAKtr5h~JJ;LoL>P>Sb&#o#(L3_rS??VWVi zI^OA5vtXmA3@N3^%f3Dim4BR?@vNgc-tqSL`qCjm4L6$d1bVu+LC+-LVT&l;9@vMV z5FX^I9Dv;$scI;bx(J;^OQnufmFGGV=)l{%U|UVIekh8zcf$yVLZ%)+P;i5%-Cd%Pp`ZD7=0*}&i`KDryqU;9Xofh_t@zT(MUtm0|DM=9<8J}w_HOdU0m@U=-W zyj7rf`{eE8lxZy;l#Wxj3*x&)Wn9kntF;Jcayj*PZgx?*blBkLRLF#3L14vH)K*G5 zREHVdu{lv7v3okL=AtK`!E1k$*o-`vm_PIy@sw#HG>y}ZX^me4zyfMqneYL_ zRjdQgoc9E7@3A*n8(^Z}1_- zDok5g_bfx_m>~`EP5M`H5;VkCIBZr0=K`cXJ8KYZ5s+9uAAy!f1la&%qr+Eu@#eG~ z`*&Ya_h4{Y9o7;%Ivk9ib3x0rNYQrfZ6_<1&TLKu(Z2jB3rv(NVBBN*Q`(IbKsQ~1 z{WFa*U9JX*u16V1US)%Y`8z9h(K@hbK;cRoHv7e}q%RTa@@2?n^c0G-&_JLY_k+{cgPPoqg9?O~&2DD8MY|AZi zm!{8t?nksiH@@y&(cuaLPGiW!lYnib}L=D)W{}wQvV%qyadn)r*IeWpYt8bY;wp z^Ga*(3h2N)m+CR>D{#~!4#xdR(VAYL%dyt0D$8Xq3ug+|;yp5tb^3h)cQuiwRSO8Q zaGEIP^A%Etr?JyH!#0@GtX?T?CJCWCS}lcrU@_YCaOqbzWLt1EK%1Tz6tw=A-XqXc 
zC?;ZJl{bEF%HvlaKdbAQ>roaHY5j$`n;%PAz%KfW4@nLP&47IXsBj)2G5svlw)H(T7mbq>M5QbmJhYRUTu97`wmq)?t(ctwO zLCE`u@k-LSe8XV9%V+)N8}s}dyn0I+{3Qu0_aG6xxXYjNF>gP@jZ zxQ>Qpj+KradF`oH{|kFxIYp1+N*NT$D8^r^zI!h7y$rOKfi+$Taj2~#ad4{IkmE`k zs`j=|dHz4q?8SVI;uQ%TMgnP<9FtpfASpmw{{Hr&<6hpG$6aa|L zw^!xLU_;GE6n(_Q#OKs}(Lkx)vu%kgS42w0H4{{~D#;iVph1e2_17R`b_$qBmz+AfR&rKeM z;H`~FfqQO8-i=|Os0SqvzEpi~x*HwrP`3Nw^YX%^yFu2K&oeIQ zj8IvYz}mWTlSatad*`XI%s-e)_}N^Wkc8ej|BJcAi_O@pUHT{bJ8q4J)CMLYmajC` zr3D?PauhPs-$6Nt%W~R@RcH*~V z7Pz}_gniL+kLI(wof+d;^& z)dbQT@hy_EuLYK!DaU4TQh`B>Y(oOecdA>IVOw4QM@t@ZwjQO3fZ3wu(bcOdlC<2I z^UI4LMX09|N?qI#E_aE<=9NQq*9lVWN-PG?Un2b~`%TDTAs!v1L9P4+=u=~?tGu0L zqQq_uMYT!Tw-hTdWdUnLE$>7kjxq(Dr}k=^J?fnD*D&#I*&uxG_HIin81G@Fbh#8J zbX>=@dhV7jCbnaeloBKsQ}BKB|NQV#?IYHXU!3NJhEV)|ZaRn&iXtRsYc-SD1zsTc zTgOzWiphRb`jY-Tx|b9z3}dBROz!821TYWL8786~u{HkSw<+#&;zIIhFRKJ;23b)b zhPm?= z2l8XG4%6{gF<}$y<`0|eU4KdgR&crLAb6l*1c**#3b_0eB4h+tW5y*>|Vyr z{w3H9nEnlzgHJ}VVAmHZUGi;1-Zx+JGR{7}_qwBVL#XL9_aar7s%NcL8^9My+S zO^9VTPo_`hsrBfLL{|QFbesITTJIJq9Xv0m5&Zv2MqOo>B&0UE#jOCx?Jyx+TXceq zUkglC!nC5TaQ%&*=J98kno+L8aF*MQjrO-C`_`6MN|H+po39zh940U=rmyi?rhOE{ zCCQ8MIRc$VtY;$n8uF}OEi@iGw&szpyYq3!1t{QHKDy`@?4nZ4?Ka);;oX!Z>~u`? zcpftsw}Y`nT15O$zxvY4wHQtcNLx-|?1#F79_fv5GRtV4dBN0&c>qS+C9!%CY?`L* zk}K1h^s|oBfMjo8NzIXciC5x5ItahF$T4w`Y?MgJSGbH+6T6*AyQVseo&F`)(&Y}U zoEB-I=)&s8>ZIAvZcDFLnT@rl=q}T|ZweZL__*|kHw2JE825tVNQScz#XANccAC;6 zsxaX5!pf)x>a*x`)esRn4TKWhU1B}r}TMqafx#Pxv6hIEI)E~^XMFaW6~wZkPR0Qrb@!yG`tamXDxDdmSzPh zZzK?j#n#fexb^itOCcdf6fc%$vnbDRl<`Ap!(PM7|Cm?4WjwCAINLFAZbkR?BB{7| zBTxTp`MXym*02uZhRDVHjoH)CFFqY06!I64q+u=NMDmq}%;T8l(2aG7jEKrv`-4^3 z^Qi>k&9{y<7gcIehIQIpEWAB73X%^9dKqDID4Z9jHC-<#%E`NHhLxq+Wfd2@Dnuid zyh(1AsOd@elFUuCO88uBg55jpF?u6B;gdB5P9y>tc`KoGAbq=~IU>ZlS%j2^R%Tgu zU*iDg=&^l>5SO|ki{)L>y&;FVfA(EE&*`#`jYUWQ&^y;d-Zj7b%*uxC%JN1yRby3z z@-Z27lucheq$Z&9Db`}G^OP-Wr>>1qG^z^tn8)o+#8j>~OH6EX z((Mu|GMRx;!>TS%5~5qQZjG5&t9-wqpwVXIwS`jsiNYq_5^|=_W!Gi#>>~H*1U=e? 
zN_d(+wkBz7q4$?f1?lU@uxzAgOHdke1Vlnh&`!qVTECLg4gBBR43FA=?Y0m=151F& zWlY6R`95XvdDOy*oZ2I3pP`VoRetmG@*PtJpDicLECmG#7Z|%E(eLeEH_ubQARhfQ zFTyvP(EDE$)kB1G^RZZ{6xmg2()~A~1;*9@DwS5@RNI^|i#xJdnB~&{I>&7mGUrk^ z=0~FAWAVkboDJ+{00BeZiC?r_?uM*!2G|;LQB;knImBq5UZ?pb7th4>{grBQ3YG{L zmnV*P6Bo_w-TChTLWNzDckPy~N=gN+obJqn8LmL@#%*t*iooGahY+wG+_yJszeIyU zFlSWjyENM7VMKS~vwgl#)@c|Rym+lb-e znq2?;soUU@bve0zn8?fA*56rqb8hcY>GA~H!uid5ZDrUbH(Sluh+n6NH9MoMt06Zv!p9W&=lL-! zc3X|oNU&iw3W<`FGzH3h<1yZ(!g#afi9@{5t*C8?)Tv`H>IDYDv`&g@;xor~J9x#l zr7UAt2VQJp0RPJx79uHqVG#7t!2GBLxPYn%?*(hb{423<@UY`WKD!rPBLVN=6(F1K-c zTAcBg6SYB=*a_LZ-p!hKZ1<_GWj!(#_hi59pW0Vb{}oV9HY8H1B~bVD)H@1u5L!sp z_M?${;tHsDZ>;oToQ9Uvrp2`>`k9`!VJ~Zan7zh^5ZOZb-oaOApzSwc&JJ}>i5(%O zG5DiLeXCu^C0-?SZz8HUDwQ=;sM;yLHlQV!AMbLMd8Yxa%zfsPmI&-s3#C&xGlG(} zrUQ8(wktX1^wl^0@E1M7_Kfl+)*Re3Sr7`O0CIFNeb*`~#RlizoL%WQzRR{&L{VAb zO5t9L=VA#&I-`V`kFkt?KaZN*A4*`$ zhYM064sH1FlTo6`k#JTq-{(@E`B;Rtv@YK{kU)G{#t7VkLhT!2J zF(FR_Te}ilq>{7+C>gSX(dLt@h!Wphl%0Ro2K{);%p-gTgwyhPA5|rO>N(Y&w!%T4 zo7d4b*TgkpM5|)&zUDlc&BhY&k13fYeyyAeP!l@?itr%PCK6ekxy zoj-W0B0NxTe~?fbRUW^EtWm|+pJ85Nk4f&HBKwv(6XBzA*z)?j>)6uSX4q>Z1{c>M z$-dRrUaM5f#IA65T+n%CQg0JcyxxM%0Hw0Ahd`#CD?JZk6RnadVY415^;!6f6-z^T_tHzA0W4c$mcny+gd(Sc0f{n;%7Rlq`|mm5H?#t&L!xX7PtD zTezW8^*v@H+@owRM3v}_gj{f`Julfq`XsfMrUgeV-bLPA%2YnJU4+Lv0a-hwN~H48 z<)3rcQzJfo(A-X`x6rhdTE&}*4J9YONz|EXzX&|fW9tKHCcKC^MN1YT7_GNvX8X{) zVY}>2Dw`wZvud#~w9MGw<9nWjAIrAPE>0+TOiW&vK?E@-6I#^C(14K;y;{a%buH&w zSp2sAHMLbFcd|hi#v73}-h5-|&R4n?i7r{=gQ?PEeQntBWXNT#XkrXwUN$6heXeEe zObK2WDL__?GO|M}8Yctu(qB;ESUQddp@wyxtMKpMz!0ADoKa6ZUG5}$M%|<|)I9e3 zje0HaM%*=7%({l_S--%tfbPDyzVq?Ta0jr_Spx) zw2L*RF>`p}p;OspZoy*P?F15gPbA`OsHZ_)t2ozo`|}~?_G$oXd^H_g&7ao_S*7 z2Nc${Umn2N7HcZ%^_svFV`0gmvjde(*>GbHYf%eL@eG4+f#ONY7N3ad=<^4D_q4WD zqHORr!WS=Fzuwn&v+6?E#lZXBqBV+7>{MIL=1%hk%^leu=Edg{>V0m}6|R5{zG~|R{Yl9e(FUxxC^?1nu9C81=TE?F+qWG|00mgw~ zPFTRCCP8W#o!CYFaCprCzg6yX-}~n6OC%c6kbh*f9?rsVw?1(~sTQk6e@Bgh)?7Ta zTG+_`&MwAS^umWow9uDyMc0C-yg(I~!j!AV3RFNck=5+YJb3*?#T0(OgJoe#XRU#c 
zf5h{pe3CxT$+$wAasTtd$F;}r>h6BgV!F&n8Ui(>Y}PBsm4Rt;!I79!WIgxHkLu`- z_IjVAk7vJKqraC=3!}?0Yepq(myev)pcoBc?q(N_V*5!!?~0ED=fy#OfBGS%+%MGt z%mVThn1_sA0lQmhMPeMz@VH%392g)D8HYUcZ*V_%wHUT>X9*ik`ZW@l>`~vb{2N1} z*W9-p|Ldu&;?d|Z);ShjMxDH-s8o5Ha+r5-yP$oC#eo$?%$wbm+hbDLe#sROR4wA& za6Ur4DW&Aqv2%t{6{6Z=h)9}sb}%_U6(9Q)-vWjTU&(iMAT8$8Y!Y;uK#T=)W}LdY zmHI~;9aN~IFUqYY{6j!Pp+b+U?xU^%H!nNUgtA&n=P=q&70hn0R_nW4==39<#Zbb~ z(JNhNgr?l@Q8T4;dcqD}yg#k@GgB%vg3Yy4Cp5!SlwIGVfK*w?h9(_yJZw5W8{J@b{vU%`-x^ic?d(Zu&)6Nfa zMwHOocZ|uAIgw31-zjh3wD=_~(~CE#em*;cO@53joBZf2A4#ig6>uu}bxS`s%3V~L z`B48GKdxkEBN474%WdPaH(OH2UAzsfp%-GdMqN=8-QJXtUd)4K(Fy;Of;(%5fZHGy z=fp7rbI3NO&9}@%5qk7R(K*wQ1Na2lnOkxW=Nx{G(`m+dUdbG@Gxm(xSh4QfOxg16 zPB@`M!xdEL+$&hsd5nfMr^T7+ZL^}+L)brx_4gQ!5sLkjjNjgW5WQu~zE2jLruR#u z+EY{7^g0zcbzCp)p5mZnOb+_rms!;|W6i2H!f&9@`w< zK#|)YzeWm=Geg#XEx6UY7~xX~zlt7mdX#<)3X=bJg!ot3pQK4Vp%zmuR+q$mYr!he zRb|@;jb2JF^}P(9lNo$74TB3_C*(mCe|JEKS#;})gShpb(j7DLsyz7*HUNhasSV4bh4NGgF&%7mT{X?*Dj_j_KceP~j()G^wy%R??j z|I3K~XZt(0BL@F=Jdm3-+SpBS@}9cbVB=$90SzQ=6^jY+5v1&=!P}6hkdM<2U zB_y%lOOKPtNyJ3KdL`^%Gbty@wz_49jDU6!uh5C|+3w>8LPTP^de&H^$k`&9GdlFfHp2Xk2gzKFD0c9e`} z8m7VaN2n{K0wQoJZTfY4zXd)8i-QIgRK%4J67$Nt>WwWuR^xwNfRE^!!EQo99H6J8 zK9R9+)olB;?^n5J%G(fg0~(8VR6_K=tSo?qwCnf=sFsn1@14q(+1a)yrBP_?aPK~5 z-ZuZN>0xTEh(cd!A?|(efky6pch@rl_KGAtNKGfzl7fj(UQ2+qIhw)z)L56mI6t^U zicM8Q<)O&f%d*3RCV?>(8S9RJFTfOIY#P8{Zbp#VlHb+4)?*D9=eTw1LBKOmIohvl zzsfg$*S{kaU}_}C+ehSNke$3dCc0w|<1ZE7*S&wAUW~xb%}MUBahR52Q!;f*dx~=z zw@z6;ajqxnbG6(1{b}W|Uj!IEJaY%L852K>6B1wCj9Z z=Ix=gc8uSpCB|MU(@*Z-&CYAj{>r$psvwrd-|anT<;pF;V{(%?R{vM)-uL6yP3!Q_%je<;HJZ}) zk^IDY{_?uBO?Eb6kyD+^EZygc(sx$)nzO`d_qV7^_H4Ik0eAJXA4u zM!;Tq^Y0njq9#_+Y9)@{U$1<&`u)3qJ;t`*z7zk-%!j?D7y|bR_vzI#k2h8Fhp}8j z5#FJ3NFo}mJz5CW`sKlEe#N6$53_f%`l0V)>xb1WYqOq2u3LVeCB2ekLl24T48Lz_eSI-3JE5fvMF!8v{R-!mjn6ZyQ zdFPq@;U+5|s=YcRM2q84XY*uBVu#qHz76oJMuz&xN#VAF2u4||P~fyp&)%c_ND+-8CozcP0D za`kVwvF}?(z5l%So5jyf7Y{_qwOOp0^NHt@)Ro;Ed-DcWi!{aC2E+zwqkzO|XP3Wr 
z2S96M*tWagGNf7K*u|rIBZAr?(TM)I9%MFGqhM7c2k8On*^XLwzS;kC#w5)*U7l}4 zN^t)Ha}cfUQm-uO<)54G^#Oq%^D;%uV6wq0VCduPxa~;z2eL5X z=cWU>2=r;vng;zF@ph6dX>e_ML8^sOBZ#*n zv7)1}LvYd-(^+0<&|@x`Y; zUXB(GV~!>6S6rO(yFq;2)}%Lg<5{C94ahPR%`;)&zbhX8Ts>G;?hGL0E~+XiK#>M! z6QEuNTwB?7cnNGWegC=uv1mqw?**IlPpqcYLnS3Y#!;WTSzKa2Vg%MJd{TkjpNE)( z+-wq?65#-$FC)oHB}`DZ7)5(fy``@(2W?)`&MFebFk#QtPm}|3Kh*0!J2YBV#foVuMP+v)-rO0G*N?+mQ_UpRvb?3z=t^77sX)b1Ns(!hICsVk= zuQAIBs45gMD7yX!Z2E3E#QV=;Tyg~bGSeq%i{H_=T!{^aDcr zq9255m5JMHT*&!`mtfu1`*@y4SH33|iS#&aMqJX>Hg+HFuuim3>1sgpw*peUdkM!57~%79?+Z6@j0@VCP7@-J=j+&9^CO!TjD_?!S3 zoIBe>6}((sYEfo3`WUZ9|EnQJXE;kO16SCEXSnMWnPN2;@o<3{?^-b84S3&mG;;Xc z=ciim=eyOG&N*19k##8~NVIV$LFv7w*?FE$#@n%usVD09_o(VDEx4tbx%h+`y}z+d zW7plasWvnH&(%T)>yN&y=e~nViwnVRwfswn!2u6SSa+M9`>J8wS-l^8+xZunAsMqi zYO~Sq>7ikB^U-;|mc_X*zI``}PJ6lA7 zBSrI@yFXY6I5>nK;yt5&!6$Hm%nHW#Xoj~d=cP{86`iw;TP5_9j==)$jh*N%#yt7+ zRjW{1neZfQ#=5#Zp(}QEKql9UAv1~Uvl@2Hy+PTteIKuL9H){WW^|loR()$Perl>` zF$FQ@8R$4NZ*t5LBSeed2EW)>UdF!uksMllXM>S7i4Pa6< zsSwqs$-Q03!tS4&*!9i7U&bgMmq&)b9Y^Efz0taEYoPmqBFCzx95-w+i;O9n-xZ^p zxkY-60hCa(R_rUHgEW<&zXOtC#QM0fSZdFKq`jqfj zTaf1;%!PHE_AAlLZ)eo=UsqdwP=QQN>ulX;DU^Hr|*{#i&b6 z2HTF@stMY!Rl`KeH@Gfp**-R{evPgIH_m~vryME=pRn|E(`fx9faCcWO3qc$iOVLx z-6qxCj_?#GPZ84@>5SaA2(3dCo^Y&Q{H6tzIq*Qcd?p}-D23IRXmBmg^fe}K&~wde1#=SWW>V}la&c+0og{g zR@TeYCrMg?Dq8z$?`hWe2Usm@_0IhWL2~|{#*cH)9E^kjozCIQV!uG|@Z@!=s|p!r zAE)=|T{DVKH8^s-Y;RtsmP3EXD(f!!-1qDYhyV3gAHRiRWeQJ!Y^<}p*L(6yhYitI zGcg)K+Z;ibW^~FVB2gH0%&}<>71XG74vpvSSD`m2 zWNpQ`_aawC@)wv%sQ--dc4-?kMx11H$dL1F_8R>oER^1(?pjIHG|h>SJ)FLCxa8 zxTm%#avAUZ*7Vgs9I3#wr#=m*tbUZk!8;lQ2-6_IAIE0FFUerCwqw(GX^1h%86jx* z^PC;a(eLu!*Oe44M=a&gZPCsZAxmjeoC!8@QQEi~nr;L6S{&2Vmb`}+)FLfa?tafE zSsd`ww%!(Z%{3!}o&SNyr*25lF_a(nb)s6 zw0ExO`Pw%%*t)5gW5Ro3RNW%17OB5+>%YLkagDk8c<*Z{r*c9boB*^hs^(jYxC;?YU%$qPdUB*g9Ppgt!bqy_2Pina`YCojk)>ky< z^-=aXCgVGMYh3EO>F0!Gn>W|63{%l_o(t{6gf_^xt^$`mvwB*^qRk;8M1T|4a`E%? 
zzjoY`saOW2v35e|K>!_!)|ON21{&@cNqWi;i2s)ELzMSZW*g#bRNPO^Jao%5FmMaL z=L((!-qcq z1|B(_Pg_f$tUj2*2BkG#ujnKSr`JksUY?JseM_5VCn7l0mMO;FFm7~?BN7jG{+r%LU;_d%j^iG&Y&MPsx1ZN!u@v(+t;8{p1QJrj zs>z#0(7(U95Dx>hasLg{Ed;n}UrTcayaJ^PtMga_pOv1DFbO1qU({UL-?cj{)lRBA_NFwPDnn$=lLK_m@MRAwhKzA~btL1u zGekPbKEj8aJFq(s)zJArUh;IG;2=Rw;NtDn5Xe4>uKnCZ{xeqT(kfRH)U^TG z2AF<)@A#0&cmvl&ua!HKY&D3C}RIA*aJc^l377qo1@*uKZhVX#O z1D6fd6hie#aSG-Lx}XEIrvbQu*7m_>Ey}fE+$-OKkvL!bqsS(Ksi_6oDYU!vt}h}D ziY*8lxgE=*Z?FlRr#dB3QIO?&EWgScyx2OHewNu#n0aM(T6{yPBbC1_LrLr@@z_oc z|HaaP+DwT4D;;j5gG%kgls4RMqS!^ZcnGz>^0Ak!Y7#Hk9qnV%R%|yq>-=-mM9n4F z|7Hwd3{-b`)jx66x?K&{jUJ3}7U*XCKs>qE&DT`2u3jWZV0nD8M9hwY11F`8b(wq> zEpdD{vGmfR{+27ZBE%(Kwt{o7er`H}Q^tu^P1|DA%{5V8a*x(R{MuvdfR(W?MEgp= zLEG2xZP5o<`>c%~{05+`#UA@UxBPSa`7JAN*m{UE|5@j;InL`DTKKZxV>e5ZGoG-j z8YXmQnErAh6BYSuw0N}(Zq)*5cw~FkV6xN>whE}zLET8I#Amht2K^E z7;2V}JAVA`(lx9f0(yM6E%|Aqft=Zq4kBv9Wy1b!Uw~p`|crV6n zgb&yx5PWP+#ZfWH=`h=d#W~6h1>!UUa#N&IqN7l~YY3tuu?R#dC7I@*$F{qN-roMc zZUDM0lp_4{SaP#nC$?*g^gb?~bbo)78xZ30HQZ(&*>l3a__)K_xGv7#<>PzXmJEdc zMaQTsxZMpt?#>q*Ws11v_wBRU;DGVG9`5NWyMBkTj2>*(ikl@ElIGSpB1|jtl-L3J zc)|3K(9ri4fft)(eSN0+r+y62@Auz0NO~`Ov86XG1IDlZeN$)(>HUKzY=6ws2;WFYD_+HO)G(MpaoFk&~#(clnwT$fTP0 zzUmGJmGNdJw+K4X-Inbdf%B9$Bl-#}yCv`0b!g-#^TqGzIE+cglpBx6$iwnm!IW>* zlpFXFDhMyj5jwShu0=#~Vt)V9fN{LgTylM-+z0a8^wRQ?hY0HlVZ6b0hvyyx;c{Xe z$QhX^WH8V^y>(bF6Az2e76B$pSewep7>}QuK9ExO8WQ+J`i0&&No<^p#4^s+sR;Wq z80Mcl)i>|+o3OcnAVR*1j^*A%$5({oR@3#kuiaLzYX?t|*pgL;^!Cs0M*7dQ>P3bd ztm2JcjAL=%lOJNo-08`&^=;$h+`ORNGMfrp6*XJuz90S?E?AAT&I&%Rw;4{IqW_*h zeOdCFDiSP$osp)-JI!2-k3qjyi#c(tX>ak(UDyw26ym05TQfeAYN-olQ(ehoIhO1F z>FMct%LNa$Pt&RMQ;es$(=ynX$lkPqi}*`Tr?Oz0uy<>u4`6^;yp?VZdk}x*l-&=P zhge#TI;;pK+rm&|eIQGfTXZU0Gu`nhne(=#0#B8H0^Ylp;gz>P$|4Ra+oTgz0)>}g z)%nLuTOp!EvTqeh^qf!R@MJBKO8bP$KJQb!)j`fo_XXGWRAb@%CYH6)Q$l>ow5w0e zy=-dECdz37Kz?r(agF;)|02Cr-Vfwx8hi8hW+rUH2HpY@>BqI-!>Rv~m@dYHw7rX< zL_I9qFYIBNALjhpx|~vLm!T6g8J3{In)$d!BowdN^punioUqZxfL%A&oaucnu#;_1 
zGqtxr={y@-4)b=dwdweE%EvR4d9#l^JzNAEWpKbU6%#hBRoAynI+p{h{ABRDxn{Ka z3p8R2be#f?P+Qy>+CO)BTW0{oYwsoz`z7`Cw_S?J4KFTbv0LmZfzcNhmLv#lIs8<} zB;a&Qo8b)_kq0(&Mv41WM1EYXY)2vULS==-j3@3;#mm!Q9y-O-NjdlGwU^&N@ok`>e`*UqO23OyrG)p`gU*zpE8N>PY>4`C(|cuV2J64Kbwtb#MQ)Z(QiH zbD%9+6|wdM3$XY;$SK^zAs?OEO!?f#S77pP8`6Q}QJ@N1$X2c)E?$#&rekPAo2&cS zBy{&p$0fJOom#7lH%>qw{I+#h_(v_!^%I}a%CDxckg+f{)`B?}>7@^^m^41Fc%&;j z*vmf|F#WFt)q=stvpdg5F1)QRz3V`k+vl}ZS|HAIB%W((8uYFD8e_E}4Ot!7F#J=R zcYF=)BzBmZKkAq8Qmm^DV;6gAXdx%m?fOkxb-aQw5qCye3tUg%`306a6-z9xy{j0d zbezuuXA$f&I@yDGq=@%G?uWHiAR)FKs}iu$>S1bp>1vsH?|b`>oTJ4^inqtO57oJ( zk6-$%Cgk!EXa4gLEzTRsDSj&a%7Y7DN&jK<4ri`h;K{c)r9JO`6;cvJVM+oCf5GZR zsi|m8?RxP-BhQLS=_LLpt!LiOntBwWZ;s9t!#jO6_wO5AwvL!GE#Asr)ue0%?@Ep2 zx18u>!*eF4nG)pOzQ!R#<4tesFUq>570pHGKd3<*U^_(0A7gz02|{sh#4K^`djwkb zQtjTMp}L`;oBZ|y-EX=%R4_M3UpFwG3f;MCv*mMDyjc>>v zhJK0dzSYr5@7tkvS6rVH{}>i#Yd}<7yQbz@oSDd#yE&jH&c0e5odtq0!vnczVNi_N zBev2aqQd^ZCAZd#{u4%@XbGP(NGp#{KGR&^uak2$M=5JTdLB3(dGUlI`c=^@O*zCB zDqm*H>3>zzmyou&^l>dw73gxx}@51*5J{^m!{uZ`M7hdit(ne}wVwzJ^prab{o ze)IZCOK`tDG`a>cqy<^6)Z$7nYW0lzLyf#+YM`+BnfxS0X#LkR|FNDSV)fefFZvES4(iP?aB0Go8nP_9D z2m_Cmk=-Oi1>|z)cpJ4-)u%e<_OwCDf?n4y2Ubui(YBh8O4@}ACP%NM!N#y&Po*lI zI2r4O|KnTq)YQ#bKrAD|a2p(2ZFp=_YtbiIlmG`8@@4s$7gS8`C#d7{hbgTI{CAiG zn8Ot7M6dM9)uCHAFLRc?{tZh#Vo`$FT}j$Yu%et9u1G22V&8qwawetd9X(>~71L21 zI&(k$#oQWEtp!NDa!bI-0>;)|RTnM2?VqsUp8crYQVTnMAPn}Z*D8h8!iWTAkchv& z;jo!`bCnv~Z8rTL`=Dl{U~2VCg#WS2ukC$Cx!8**zm{;6u|17@1#Q`qXfWOL3RGb} z{fss2Tm`R(aEX;I+pkMsFs)B%XP8n%_Vg|MeQO-5MK3yjNskOQaB}%KigMBXMoZgK z*hj+`3(?_Ol#^5O$Jr7MB{3*qK~gdEA46yTSfm?*Z=o3*1+r6&3cIPOY){j}VP%Xt zO9I?>%Bp_fKz)swTcn0MjLB|HwY+1%n@J6ahK?qWC>oC*NYCejtD8svI1_#Ar0DCd z+Tgck-ajh&p( z9>Z}V-=a7pt=hmUM}0i5yXC+27!GO_hGTyGK&&tc9ggjBkFNQxJ{ebOJOH?0fs6*? 
zKT`%)Os|}RS1IJ?z9G00Qiq6HN1N2YnwpkarFUUO`w$$FtT{f2Dc&Aw_dR$K$~y|S zC4F~hF9U|kH$PY!8o`7mGPqB2t$iuIEp*b%2#Zduv-OddabQZAUeUHEfeJ2`cm3JY z1Dcp;$kuXhp9geOKN@JfMMHKZnLy9Q*)qSn(i{QoK2A3 zq?swZEHx)mQ*)kYgv!)3gI%VU15{>e4rtCJIg*)Dq2P>wGY+7Fh(OQU|MxwP=lSFZ zC)T>xy6@{cuX6y0aV7EbLb+$c2mSu!SNha6^daN?=-7(Y>H|TF4X5L|?ej~cP^qEk z7wW2UjTcCxpQ+vP+# z)03wnQ|b<)KDqj;NledcdZ}I51WTCjtl~R`kvU*&ck1xrlT zJLgr?z|;Pwk=VW<-xf!gP4LEfvCy7JhaW(R{|Y7uU^qFCYm&98Rw3gSJp_ z%T5G8#7!`@i?;#AZTWa#jG{(mG}@fY-Bj4FQ@U=XIEli+hxli7l`R|Q0nW#OPRoCw zIh5|G+xkZ3+rQFb-`Sz(xqVC0Of+iW)l(Yz_w&Q35CW0Ai8rKj^YC)9ZzsE7H#9wo zW5?hsA#D<@d{t&q!U*`}jn1fZKSIVuE_reZ75288l+u>k7DzBAL?*fP&F!+}cSWc& zn?py&TnBZ$Q95-Y*qjS(idQL-5U;zfKfJ24E&lEJBtxs7Kod5kx6xKgYIf3<;DrfMq{o**@HEb=3$}X2XIFXt$ z`r%UWu6ZC3UhQ)jHGi5SU+Soq0k|3Gtc7k^*R3*e!{W+{^msRS-J<)$Q}xl{zF%dp z=W6}EPPx~sd9Vi(Mq&a_Sc`m46=_Ya+K}HpiejqBIILfZxGq;8&s1}%G!;aaA8;YY z$7|;VslFy!-_saM3==;3!f0#?I%z$(W3BG1M7n{3;q7#?DP0nXnoO=KF%Wn$Sxg09 zEI3m)>zaFp0-L%ss22+)=kOhm(iz6%hj$F*jq{L$1GddJ|2_);z6oJ=~$SG2gg!7PzM(r zjh$zUpS4OdZEoTb5KeYp)GVs=;@iiHrrH;5KXac@MaDh}&^zt!oMXus45-%h{PWt>S)2#6ZlfbZfPuXX35-|e?l@~EmaYiuMc z&u{R0A-w^4?Nx16LuWTfJ#tUIYueEpO<-a+maIwfA{kMpg@n8CI-B&Y)lt1d_RWmH zGxB+n^a^s6*Du`IR2TgRS_YC*|KZCqhB`YZWZYbC)HZ6-#E=7G+`(hLQlaN@{6jOL z0V`GDm~3X_+Bv$CC!1Rxd9I?>r~qIhX@-98uE#|^bB-`>uUsbNy?2$%yT`Z7IV^G< zxhJj@14qV1Q(z>!L}!}b?M`T|Hz~#&SE9;F1{cng`g}}h+V!B^p*`ou|nfY!*!ty_oy5GA_>;} z$8ihiY4|M#ZU?hbS0`Ex*-Cf8|dtVmY`2IbMag=FCF-$`P}~xv8KAQ^cqbmsQE7N3&FzJP zLXhS=jw8M2Q{2uNs~NM#2Fy>A?@(=Yx8iY8`y!cH!(ARhZjRQv#%>Mb`iT*aRQhtQ z67e{5&8yS`C&vGXS{4F+;%YvlPY$P{4OlBgawTftb_0Mg87)>0P2?Z6yBvC+RLqD~ zyO8f_&yMKKC8XAsaodNAu`(%phwM;H5?T~L{~{yohfgi>zL~2&;*KA4_)PyEQ2B?U zyzY9QLwjk3d2xNcSxrXj>B1MorCdC3!a{{*Jhs|zi@;g^#4a-E@10$W$F57W4Tbtc`#?M1E@TTy;Dg>)@ znt3C8$8!B8bL(H;>cx4i)lITm#IIIYi>oQ!_tv@TJ=dEBLt%Q30q(p8m~Fxb5=gEq z7HkR61S9qDEVR0-C=!sl#Q}bvvxKzGJZgVjqnXmVD6?MF9~VGfZO9`4#9?P)ak-R_ z)l1EleYevl2|E_{IKkZ-pl1jC5t?vM_Cn$TWUD{*9N+8uP}HR5(qDm_2`>H5o+XW| 
zPUlB!NPV=6B^e3d2ov~8&~CYOi&GrUUvU6*a*JRp$loLtyT)t+NzKH~>+E@{e84vA zsjoGqBXmyTjE-L)%8xNw1>kQ(r!!rkw6!6DD?>>&C8?MGi4Y`SdD8OJxcNavQTC2O zp9km^u@ua#QeH(iin9d8z_~ce&glYLEQD=6d#&3^&xgH=4=6zekM(>^EO&okO45{k z2)nh5S#7j^_vrn-4?N#|J{JnHgH!|agZW>CVAOQ(l!B8`86W#;U7`ECVTCchkx(~( zp;2O8KUO2@M_49&3~S~S-X}C8Ro^U2@1mj{yF9{79mV979>)E`Ct=sO>a|CV+a~41 zveTg2Dd(VLO3fpBLFrvP2(8m2tvdBO(uo#z$^xlN~9x*vmEF$jx#lXhwsgZ z69WC*E*7yDv=5Q6r!qLtZn$^f=YK70-WBY}$?W{;<@!YYGZde!fl4J*uU1Yx0l=s8+N zHd~7{R+i%t8jJCB^EltQq~v;5UNPOt1vqeQ8lE^6w>UF!_ovdge0deX3Ibi$6gSMxd` zYf-4Q5TEc!7_<4BeLW||^^T2cA7Jm%tJV|KJsSf%yxw{Xb$1}V_-T+w&M6yV)VU<9O&{mnVB$Hdb7hR8y}aozAb0a}F}9sIUU`cNx)FhHZ*?RdYv^gSqOUzI zHyp6C-rEb*hgS)Ymm?lgLx6!4D~UUo$0qj1W73S|4%$Qoy4W^S*Ni|7z_vF1GORdi zFyW6_7KQLe^d!L341%{Ei(je7p=t3@t=`zUT$??P=AUwSR>SNOU|M}0w4yqalLD;E zaPdjPS;3@^7D^IS-#NT@GffxI0j8J_FoIJ!6899Km*nvw7u_}8o6Emziqy?3q{ z1Nhy9$gD#M7^jPSf%VS0`&?VRZ`aO0Pd#5>LU{1rQkY`MF0$DSZw(L#aqgp*NTBw` zmBFx;1xvH9%6J~@k^k*(wqh#3IV-UNx{K{Xb*R=!4S(G*aG%-{RQGea5_q9e2WAR8 zR5d~b2WMl01z?>3w(o+KP+rC{#}@dXkCB5lnp1&}j`7dadQA^}XpaMD?4mXxhegc@ z``ih)=f;1v0y_#OuRH<=bDb{{fV%iMf!{;Id)^_(a5dNu8{vouFaNU;-V0Kb7}-tb zZ7>pQP~}S|pRa!>5B>>y(6?Y7$&~O?MH&frZSs4p_MoTZ63mB*KtkvtP5eg{1ysdf zYrG0BTA+o3@^^Af7>fIS?kKK0$}(`qgf$%%zcZWAtJ zhzpQLqa?=pm=6u;<)a;;)d~%m3z9qKZJLNOWH`#BS6`VSQ z8{-G=jd|4NwKRslpud%S{d~8M4v>eDo!#PS%>y5%x#{Cyw;-k-2cq-d|iqqX0j zNoL#&U>vaaFjj*=9%old{`1So#{P}&$K;K^^-%q=UMmnz*|d_>xem8FdcPM4Us$)| zC!S(1{3^B@_gM1S(Oh1ceQ)<$LYMTJ__K@L(dr{xsj#(FH_2-tU7D)+=sxP)m!Q(4 zmxb9xsBSH8&a7K0sYY`q?R%Po-p3WM==Wp!BqecDsixykN4?;Il#OWSzmsua=gEs? 
z_1DUCLNkDIZ}Ar=@zuH})F*v?Nx2s;4pFKHTNgZx4 zi;5Qa5BC?2ZVfMlc`L8FWz(yjz6;Q&es%#~)1s`u8om)G6Z|(}S7R!lvPFLo8L7_- zt-+_*<9C|t-k_K}x3wt0m>9(Us4wFrwYUMbh%cbp09v2-N4}hOHPrWn3Z{12h55rv zTT_nyg}Vl?X!c(myZZj3@BV*?(%bJFz`no{1uaIxyW2H>(yj5K9z=un_yoZYhAgG9 z0|XJjq?g&}>pKy=#9VHxoes<}(;@M~!c;Eq&|^-QQMT7DtXqe&+z=P7!U{3$VbsMZ z25-Nz|3$rSycpRaL=B525aFL;=C0{#;dd)AV<#V@HfwM<)?*h*fg635xM^CKoC1*{ z^Pm;>cCzEc)i;P|!i}i>jG1WV=`ftL;Pei!i2a4^6l{p8@mtOdY@C|u zSHZMn`y%tjl5cAum#|svimub%y!fcfV zK8XxCh#sz2%F*8M{l<}3f0%#6PJSOdIwK38f>t@q{`tzj!Ah|i7P4;le+4c~6DkTu z>gp+*ykdZrB0-+oQ32pSN|Bzb_1r?e3{_-CbZ_-lRtG9-1sqbDZx zVE5JiFw`|!p;C{n0K!`d@tds`_@UTJI6r$7;GNu*g7R0b7RNdG&jcTv6`PA}cU<{m zrmM@8-`Fg7vZ!0C`BSObssT@FbvJwB(P2nmISe!Etc#i)82?E9yd8Z3aW=~zd(KCp zgyo#N*GlHMM2?x&!!Pm9?NX_5V1AD806N85w43iurwJ2r16 zW01tRmEo{d+8$5y)b2MNqRq22F>9Hb#QDw25JuV%@_&H6rKG>2e>0lEc)8_RF_Nv* z_<|~ljSn?S>iz*_Ar2S-IRlezfcUzc!{a&Pu?i>0 zNtqV%{J*H@kCVgdF@Wu~!%TKaaTL2Nzc6pL$S?{;C66h^af!w_l+4=vdav#MW|_**DfbzaIc? zvC_k#r7N6X9qVc`9A<6fvOzyLq)w^L4Ve46Icg#|Z-(@?l7ACwak=XivMjybFN;AK zt6$`nQx%f|rpks4d2kj!TEn#J1dYfuwlF8!DMC1N1>UW`2P(_GEJ`X`h(}^X)uju9 zj-hQT;+oMza0ccQ>=z*c2}1ny198{SxAcKY{T46gXuYB_fhV`d7n?E&&uJOW$%Cbw zB!+_UR~*2wkVZF2HMyDC2c0n)`j~%i56T+H22MAHvDkTD^pD_cDZnJ%8C^(Mvx>I# zBuh{Xk%@pV6#s&Q+H*$MCewEWQ^8PIA901>uMS|@T&bnLap%P&fWkol@rjDCJI+9> z_xpML1sr_wodJOPZ2x(u^3#AiDx{q5yp1yz-u;R4gpd4|V6r)K-666FKYkJeBfp3} z%SjzIq#fqVh!2T0ZgP;K#-H5hGGcAhiAlsw8sX*!wTJc)yN~~ZZUgb^04Ca^$22kuZ{|*Q1|_tJAT%_w=|C;f}>gP zsKX2c)#Bt>nWt4vW$&KHVegP~J%7c%rboD%?3@ljeez)Xdi(y>q_aaT&hp;&iRs1u zH52u@4zB$!ive(tbY#*6m5K_Y9N^2T*0aDJS%E3wck~H6XInsN?V5Z8lce(bHcIOq ze~-tkW!eG{oQNkwFfudE zZm&!4oI?R-u&7bv&5&w=3LPZ9cuXz1K8rr7u4V#z`V4oXb9^~Fe_c*HGh;tidf`BL zJ6MFTU3&zO&^Aeb=p|FTbFUa5OC)9lpE>(Cr9zK;(3%`c(JVQ>t1+@;AlgC}crw=b zjsjeFe?rMe_UiKhR2bOTY%mYQ&sN>Y#1@zVG-*ju0m7gZ`2$q@#Nq_UaPNBmj^$Rb z_~b;ygChsWqH}ji`nUCwBOkqZ(e!9OQ%LN}H+VQu+$+3DY;?VY$Vp!R?%YGKHqCbc zDLx&jHg!CSuz3ZiRF!H~oN|$W*4dDF0!a6Z#X*VQH>VLvkHVasfr+Ss&uiExzz7z& 
z73Z4vDvC1Y#r<6Pr$k2uF)QQ~o_8mGM5H6Culyf{;0;w-Iz&8kZh|;1vK*7a@CATm zyB>p0ltTsbl%NVgpKWCL9O^#QZsh0u-94yoFpaMBysfG^yP_A;0T`$iB~o|1vF+(D?Pu1SyLg_NSVvFkynR~amk;*FsoMd;}o7?j2=a@-|CF*ZFAC&T7{&r2X zsT}Vc=s%AuXMmT1+En1W0x{X#ck_g!?Fx$^WuZ)ItfEtEo{gfSgQBq@LjqS#+d)48 z*$L;-1Q6giZ`+K~r!u-8&FrruC08XvkgvsvL-6-u;w2cH$?8ocz!20svej8nl=T0U zY(U0)X##z-*GT$?KIljzE9sn({-DZtB>quFdNzSuvC-@B{r+%|njI^F-i-+I`fB{N*U+ZWnxngo7OX1~N9T4Nfnui^aB0~L zV3@b5Q?IoiYyEe8OvM8jekn8AYZC%53PHlc)F~FJ_cV89J{2{i0{J-rJfk`1t^w88 z#&!&zC0B@!0N*#bQ6`_=c0-s+{xq6~ zf%OA&&rQPw>56M2QUQr6Bmf!Qi(?kgfj4svQ7A@{R0Uz_+xcDRij#St$s-K}IB>Qa zj1?wVIJ?vHJ~}5hG9IAdcsm*%<@#u9gde>2E!BgmW}*(G{woETay0~QhD>f7SBcC8 zUhPo#1j=S~Lw0=u5XOnagOO2evy`yYnX~V1>D+$h-fT2q@nTa6QF(h20Ka-g&`RQW zV7CEU>=lh(3ar!*>yX8mMR9=v(dFU2mZ(|-z_z@SjAs5pumCEn{j8~@!*f5QR`i4P z{;3Z%G+3SIY7_@UQe@?pUq#p0>@1=Nm#u$O040LqM6u9DmMT=enHXkeIa2qIQLr4{el})%=Du zbs2A=?Mg&OO1AuI5@GI?UvlRhj;BDD0cO%At$M@AI>NAfh7h?PJ1C_NY-cEv#jA%H z!zA2IjN7EB9fw!q!qNyb;H&I1_q@xT-B*ZaCKnR9?a01Qi_AyH1-<(k{*EN-;k#h4nJ6)~ZEP>xgw5~ZTlJIZD z{u;Gx&uXXr$V1a%uNB3^$=$0Ji9&)@0=LOC(D3K>-{IwUyS|!{E|TVZW7q7i`ABV5 z&Huo=w}u;#wzn!XJ1FPiOD0;dZ4X(gAM;9ELS$i4!fy4WKZQHc+Wwhw*NtYxj9|aq zkaDM2g8^v+s>Hy5iAy>0ST4@_TtBRrIb&`Mo|CwmS{lzFL zmJ$ZF*4fc0^XNr}J|fE1So`{j3O&pj_(AjQF@WhLz?yQeURizmbh|inoBL(eqJ8qu zMMp5rqy!*<0;Wd=5L5i=+uq6+34$$)_t+B!*EaBct(-jIyC@(J7=)2xISw|I+k8Bm zto05hdL9xvR@9!`5KCTJdn&Pn8dd7OX8Kh%MoQD+s=}*V=yuY7)nY}+mtlw8Ut$6f zN=c3)?6vVO0>X5UY#qam=_1LcT&S0s@I04Au9QV^+`TnjiD3Kt850jLGD{Xh6L-9f zmfK~T_4!BuH2?;8Lc{%9PNijiF>8b7NxGl$DEL9JdYOUM+jmdWzYWa)-RM$ zyBJkVvHsucRDeE*Y`0k1tg)d=)+Kt!lDsgTJNqVW6#=8^AH$AI7Pz5baqTSNCFL&G zGRiBO`t7qn#6dq;%_SfSBLA1^W&dC0r+SEH1+sZtX~+8JmX0Errt|+>CfNc=?EAY{ zekl`3+dPoSKgEuQ*(^cOqJepifE$+Kyv9fFwHPbb)2=hj{7)bn3rtGjnh8jsw+%T! zqXU}9P*qV}yd&faQ&$Cs@AvwQtEo^AgANP+>v?}mSk&Q*M-o55fmWsTw}du9vRI8! 
za6dx3GGb*&t{;>Y8J|cwzK$9KR-|DTO%R3gsb^2z(XcHELA8(tzNL?zn|Yc6u>)Kg zo(cQLQS~F#Ru3aWrg2S_DDNXikh-Y$r{s@~UX&|!CJYE=4LFbtgw#!$7Aae?l)P2#z75*ymZRQ+ zcXb(tf_1X!C5d+^f?`TFOorp&J5)sc}vsKQNMoe$;> z0^0jETB))mg^+K6JV*45$VuuVyLToD2xqDOti+&z7R}}5Wz103w(L z;sL}O7enV4{}E-bxQaor4U8SX?4_Uh4xxZc1h|*DVuGE{r@HG#y3!#Hw1&C(MpbnK z$y=4$2Y^dCkoRb=W4>Ah$cX_gRseX2BQ<{1Vh;J`Vw1|9yMWHOqG5D3s2C#m&qaW0 zVz!vedjB%vv6H&l;-d`YU19SJ)|F^^+g`1{UT7B-PBP_|^|OZhbxv=VeM^o%eCr8g z!damJo`fgb+SZQ-S3s*Wv0B$EtW{^Ku@hDRqD1Lvrb1 zx4wl1Y5mI9)nrFRZ^dDcn$(SjLr;7MpY0-UjqE2YWZkU%Eis9jsv>OZ0D3O#DDn~H zQ9mF|2w`^SqY4N0ald@QC16uKY(V-45;qU|pnihAGSEu%9$ubXZw_e`CHSgFiW~71 zf|?&(YST@=JEvWTO-za?d2k`6ATk&X1*Tr8fQ)%trrFjCsRZ8zJ+dHHN&QC~_Ux#a zsU%@04EXT7usYm98OE7B%)9i8)NdQh&!>x0sfX#$IvIJk;|H`;IvWD+PPUQTCNbJ- zFXvjJc3HpS_ufc z+EyFqkE^4gO95Slz~@`_+Gez;il?ezb`wKzcJiV*53tY3Y6U(Fi3CFd3e9%$-Lp4u zhF=C{bkzY$gabBr3aR@6knl?d&7M(qbCCUuNB-gw@Dr#rTq!ictHeE87On|ff`om= zNmSMNjcsbWadO$H)x2|TJr`1ZfX~$yaCslK2%JBDZ2z-#G&h|CY?B9p{H?uEP;jdw zI2M&Bq8WBl+#F7NhT=J}4bx9v=a0;i()t0~)T;!jpm14=a0vw#N9`|YB*Nh#w*tOZ zwgxe-G1Ks1BfVEiBCXh6T9>(Fp%F<#i2-joiajn(_t7BP9-dMhpt};zZv%M&$HwKl zls}$`d}o)fyb+Qn`0lQJa`NNy_`17qPWs?EQ>P|MS^7Kt4t^L;LBTqv5`F9}K`|T+5{X;7rEf^@03BI?rMxvuXP}3g(9d=aQ6UtV+ zBd>rYtP0qz;fs7IzecPk#&d0D+{ks*9ixixmxTGQe$?xMtAkRIFzkfy3tD_ z6{m;s{i!WZ`QXZ`<|&xXT_c~#tTEar(t*6$(I-vkD8TzH>||sS@NEJ$xg>-w;9o=e z)P9_=!%Xz;jM2l?IidVOpZ$~apIh02JwET+{~_#)S^$s51D^25Nq2?egc|-G4w^#j zgdWY6XbxHvCT+^8M~$6P6yII?AoptT>A%fw6g=Q)AeF#k%JYwdmSGPq*xKW&qVq}P zbrIOJY}Fs%wGX>qwJ3-aKPy|LS`My~nvR7Ih7lp=`5E_-w!{g!4)s&_Sl+JI$z2`> zYh~-R2#+V7bV9@Drm}VLmG@OF<4(UoJNcs%xv1sW`Nk(t>9zD%<$H8(mim|_p1BiP zPjl|^4$@AJwQJ1R1>Vr}FxVC>CV?1@a+mpNqq6dk{;qAp6Ol@zDq2?hi{>%iia#5{ zs~PH667u3caP)$~V4@CNp63c&aW-IiqB&;Lg&VtME%apzUwmAo9b6d~Ym3#k(a?rL zIbJ7PUnbc8v2w~KVDP9eLq(V|Cnc|Oph-q$pu-QG2k>K&lU}w>J8lIdivlmKP~b@z zASr;gnNVrYr__T#yBOh!`yW2H|J83+ZD!|t(SK*(xRpbF|J8hS;m7!mUZ4zNF-`Hn z1oeU5lV;iCpop>oJT%3p0gx`4EQL`)00m=p4?;?kS*5g0-7z@V>ve83O#kMsPWsfT 
zQ*MUyDe131WIEO=8f`nbm5U2gB3=)7dsbno@@{qF{uBqqNY>{VZZn|(cT3b${pOg{ zc$}#z^U)?rnait`?{RGAJ^!d2sb-|-LlRt5gk026f4i|Q*tdO|E=Lqe+Hz*#2Uf+J ze(mQHFRd+SqYHOOv$)e}=2&|kpU!kbNV9jk)in+c82)sAmVl~ZilJ$yrabN%#y(w? zd8pHja=H7igY~m7U@Km26-HM;QB+Ls^10dFXSIJy$ zHw9E!DuH4ITO!F-;D%|#tL0PZfk-m!djd^9){!CCU0_GW2BJtEKL-?>C&X*ZOi9tQF{@ zdO@6!SnR*kMGsjiW8hOM&KB8J)Pt07G|^S+nta)tsN0o;B-okr!(#D=VdsE&oK zWOCd)PhH_$^`md_9~Mh6^AB6WN)@IpES?v!wV$HnUG_)z&8rtkFx6m8HxL?mT<#g@ zm{?0_yH`oE`xs3akB%e3-Y!E==wFR!hqRu%EuYrUG4G1Hv4{1iE{h zxKOw$jvEZvY}b0??{f#WG9EX2avS%89pwKi9IU6!ogg=62aBMjZPqvo9b@0Lqwt~O z7NVE-yjVDo8LVFcG?L;5?uuAR7y-;P`WaV&5rqY@*a{e70vmT_^^$qC1_5FU!sNZ8 zBOA~~DYUXpmnC7thHp`x=c(y+LDQ?>5=oag#I1uD)>&JKfgKhA7jjuEg8losWkjH`Ed>>Js0Z;b0I z7US>3*TMIJ;I;l908ARc2ohuQc^1-(D`DmS7l7&F>!^lNG++*LK5#}A z32DNAn4$Xsm$hB0u18!=B?;m`WZ zZhyH~ZTI}NiBpz(W3rmt<|YzTEx4-Mj`n`a8!k-$z~ZMK+u0e2WlNCi~l;#PD3b!DZMn)dR4k2?D1 z--&_PBRH|ut$U;#sfCC5xm3)yI3|w4&9@CU!DTY3U+89{mu@Xc9Q7S+M+vB5Gys(J zd7vK<3VM?G3fa~MV5Yr^{G~G$#GF^pOqqmj=uD!~-q(208%#feN!n&=rwa*(a?QhG zJ}ZR92vxhV0Y%F~@_ivKyVH++-704*IwR4Pw645HBe*jyMN=pd+*NF4QP)8{a=sB` z_R3eAjKQNq#BEW=F_ucC;6i|K5vaNWxK>u&pUL zv}tP{0$Q;)MB0VQxkd@aN#shMzSg_+yh*dGbI(hb!cWb`B54Up*hvFyBXe=lExGj< z_>nXG)#f4~-6w7LJV?8NV#RGr<@^4YsDysp6X$e{`?=f-qHOM#=p_>qXopW!?_-Uv zMa;4@OS3(X(3QSraXb68sv*w9(5$w*jOLhUb>n9>hOV@ecSXiARgsAXb`+zP%kHO6 zZa=j?v`&cl^Q)wY`GrRcq~-%(o%ZfQdN zK+U#VWgtyS{Q0&-wA79X=q{zpV_4Mb zkpto6&r{WOW>gYm`ZKre8`V%JhA1Y*jf6~)!fbb8Fq`1~((Jc+nDi0bPbLfFLo6#>mUrH!+vN=hvl zkXaqXenMSCcYzh1w!Fz1C3@r&>|BBi*;q}!3>ASZb*m?LS z%K7ukS{GAZIskzkM+^}qY2#%=xncY`<9b; z%-C4WV&xcUuoz(RD7f**8N?pBBP2s=P#M1g9&mB+z(e4Vr-k;OfAsNw*!T~}ys^n8 z1FNFDZf>?l1<914z_VvD0&VDv`QswHFi_=NG%ET!IT8JXlx`mg6Nom@4~Jn@DMvWzjcyzw7AE_YRF%VeWSF9 zE5*MeOuutBbYwpeZ2rAnB}y`2zB`TW34`O|>>JQ@ca0xK{Abhs5SwE$qnp6$ZM(JjN&N_W@dZx>C_|Di zXrW$#s{+g$Ll30NVCk9Ey!{J~9dkEBEb+@~HTmwV%Pp-mS_3Y(-(+7bU_!;gld4toN^nNl@T z4GHebK0m(Oaa*n1RXV-@T-JH}vYp(+vm3E_x`p>wd^s+koN+ZAXzEC7jif#3*8AHf zeVS`N9GA)9$vX1q5U&O4%zo5q%dY@dwDz@qT2c_ 
z-{Kz#(2ir=6Sg`Q)78Ltq=(=6upo*Ro7~~0r=>83lq}~93BlMlD1Gz6k}g8~o&U|a z7-=nu|20|^w;L>lLM17!HN9BLoa-AW(_P0tiy7{F&Bd3Gi{qHNn``@GGj9xQ9=>>7 zPEoLfs+4Nd6on-{^e3U~u2F5WX4QQx5!i%C?&hErybB9fH!=C*GsIrU>SUL{%m&H( zQf*AUD+;iKg4H;H{DAXrnmuF0aqp|eD#|BlQL=5#<7k!*2_PlsrJB30rkPteuUuyW z)j2d33>*(sAg}~Ho>seX;*gD;FrgoP79x<9!&?aN-&uiOQpEb~wfKqV3&MVI#8<<% zK(di#0WASvbGAAcCb(*R<%L*C8zccanv^Q%1rhc**JV{CZc?|lq)V6hVGc;UI8JEd;&%3a8ZM&{Ox>b*y3)Qzy!uz`dFw48Sf(|rvC5Q4?L zMkeuIz)|wepS3cvAJErsB>mXGA9);wJqe}C`?gs`u*M-xO#P3J$sb`o}!xEjo-3zKr!qw@Y2#hJ?$NXe6zm0|d83FN9|h2iW$V#{#Ri4VD~e zV~$o60xkd^9MHU*}rx3Tg)-S=NwO7XOxvMv9dp7E0KT_Tu&6DJz1 zEL{MUbdHm#nzeKbb%Z_tE@N$!$!I+{xggf68;V>zfy~K8Ru$t#S31m;7XF6l2k==> zOfeuj?T`mB6hptlS@B0*6QHEaVa;TDLz51IN3~2w(7uSp7m(tbq88#rxseF z!@;uJYL2q(M)yoS?* z7h!!Yns&x|sKF+FsYU#!{7%&KfNMhpPgL5mUnsu?;|8P_UpXcs6w%UuvX29MWOc@X zK7l(J>+-I5EziFM-%T4&;@NOu%OVaCLj?~<{?u(N9cn#a;c8l?uE&0=v7_l%_%Y(& zeq5`WDWm-m8Z*=Ere}t>CqT;_kgw6kki1D3mqR_@RAW-yFS#g8lzVK3A^RS^~ zO%?fF^U&F_Jtd9x07KWE7Q7kW@K20T z!is624ZvvZraLGu6|>R)H8);jb)FcUhW=CWus6|HQCTbRC1Kdrg}j76;45wU^JQkp z?G>Rizz1<$Df|q)`5DNc+2WW?-lWUlt!p%!4#4bPHC-qGfD|Y2>XVq>M7afMXS*2q z$@=anIsRX3#Qq;{dKN;btfNg|FbDT=wc3yc*7PQ0PIsCJ3J4H&rj3?+QWwIUvZ8a z3_{Y%yWQy2JNv9jt}mt9ucE@qt`(!t-QBO8$_Ho#CaUw)eDMcIb{_k*(|i6F`*;_w z9@R-@qec_Ho6r5J1;vD#0^bGLvkQOc(DKy{+{VC3G%TC*5(s z+k+V>%IM3+=EV#F123YSuf1AV)Jb1=w7e0Tux1>xSF{3hsjYR`m|8zmdJN0l9r2Z< zxoc4z|F6zMh!j7`VOY_2=k|-lH}^W2L-JH;3M5g=Xya^A*;QX(Gr-s)A@E5+b(za?r) z!Wm&Ghr~c}#mAj9mln6ifOb!L{?EB>o=tl1J$*u@=<3dZ<=Xyg>kq;#ihx&#X|7w* zuq0r%DCZ-v(b)4;;4s8=d$vjvK58HPJuot4em1A}ak%($Y6q(`$BrlJoYYMZiY zSM!X2qVNW6IaP^^L?)kU)Zfs1Eef}H$ssp6gTQ{lG;T*pO66l*#jm+Wyiu$rhkyM| zu)JQRB89qhi@cBHNTE7M+1P?6LfP}F2=8qBFJ7Ei)lmCxU|tUY1hy;ntQGWkdAdJ4 zkkB?+F%vlD?>z8Z;?2xey)3<{9J3jW-D_n~$y`ILbeT0>*xkDttRoNR48H51d~*FIT4(>1g1hjOON-W2-Zr*_ zLj`c_hDSyZgnHl~O9taAAs0GlR9bGJWv_h&Bw-YSMPLZ>M8KFAU<=T~&?%7yc%|=3 zB0*FAr?go_CcLnoazXE@p=r9+3)5*|^m%fA%nj_H@3n5pq24YhCU{ZW^M6cJmS^gJ z!W~-n6&7P97OXTLk@24AAgtlmKVJB&tJk&}e;(@#lj`5in?|;7eC72mIthv&bNz(Y 
z0dWDE=Pw1Z<2mEhhZjKX={Vulq#jBK9a)+Z{RQBk)}fqxTtxE;L5X_nh9py7!$D?s z?0)J}?CID&>4LJCf_|rbDE8#_tpIm7wYrht5;`cIQG%Yh&i4OJBUXnKfYO~>*uV8( z&MA#bGf|#}FM(k$w0~0-(~HrgZ33Y0P=RaZDR*)CN(ETE zFm~q#xmHFe^;z%1LF7O&DU{&F(OP+X0L&DhJVyI=?!$|Us;^ypr4~>r*?=ggGjt)W zCrrl0mG&&pCAnvQ{L4NAOrY$x(faMUNn)|of)y&{S389dEnlF?2UdcO7(vnfe{<@w zf<){iMw@l=$46ycNgK{h$zw28fSG!^&MuAUmz-&fUYo!*;hFLT+9=#9G5$yfncuAL&sBWwasLEQgaBEqVF zYaP|Y_xrlYSOL#}vx`_cVE#qNlC@+-{BXmHc2Ll%TTDlK^wKjdQ)*71th;LT_&J zQ5h$9&6oxnAV$0i8qf1*D&8+rX9B*odUqkrUvmcKj0P+Uoe6;BD}Kw>0l1DIXB8FZ z1?rOwr=km4<@#M*CZSS4v6{=pS0feq?@1Tgt<)6w#G=6cT?B3?SrkTGgy)D)h>DF4 zxb26sGhBM47Z@`leCcXfzIvn5N-h_^TWCKdgs}q{XN-9$kCr4mLc`ZaGcig0z2nPq z`NwjBNTc=F;{Agq)8{~AN_SEVxvg3Xb?MKnJ*&f>Az6#Vx?IZg4vPO2VT8S1mehA1 z0Q$aHnci^MTOt7A@!z!+S?~>oWI^eAaq?!+Wd{7I(bsb8+}{#ACOufa@V7o|JBFwS z6B4re;YE#1usr+|j(wf*{M7WCJfIM^C^I;lSr%(<&nc?)h!y`}zZO%5r2e*x1iVzx z57BiTsbw&*?qK?XQGPoiOqwZ`?tI?&^WX74@*mf3{ECb=xB9@c>!VlWhl%PRgUuZ zew}f;SwK-aT%h=!O<7>g*0-~c)FX+fNdi)h*LdLtqK}T1hunPG zaQ3-r9k8PYvHK;{`VrT@o>xvsaq>gq2Ys^JPbK>EGq&ld1w}wv{l9k6GPj9cPhh|l zh1dBmoB&sj)l)9!kn6%xvD9nQrzOkH3_fN@Loy8PQPuj`?2L6!oNuxw>5Jae5fyBI zk65vZJ8xf~H6n6lm zy;XzTb1mpCM;H0dVW`< zGU(0XppGDz?^Vk8y2W-(t!xE_=eVoq*j9;qvHKrqCz+AnsXmrH7pvWdJhYkBP5m0{ zH?tiqcPO=kW0#ory~fqm6R?^ES9pZk|36HBoua+N~K81X|}t% zOGS2nirZFdRfv`Q$aRbt6uhnzBQmeLV4T_TR)9D8#i7eU6FJK+3(a)qJD?Nc@47 z4oQGyy_M}9o^rGHajz5N)qi|)FuF89@*uFVe3(=Wg&n?C z-b7M8zw)&|p2@$6J%Te5(_&CU{grxQm#WF*dp14}JQ~`0t z50aXzhvdnk1jc-Nkq0JlP4ADugAI8!*`Cx_;3Xsl>cpFr9kYlBRYp;3g=Cq?!p-C5r!w zc%MpC1}nC0=R;9V6=#qxcKV}M6HUupe`$B8ZH9TgP99;XxkhoDoCTm{4TK$Iqq5?4 zLzStha4Ast4lRYcUH&6m7H$VTR-<#Y;dX-p(>C}cv1%MykTMPnV-|E;;b8B%!aNWv zcSnQZQOk6_@G6QkR?C$;NCq&>6J7IR_e)G%T#ig0ar9i9&4(p&MR+r_?|~DIBjH<}(B4}WlX;-|ZO7bYrVaipj=&Mj(i}eS;GDAvi5213 ztOstKc8+;;>A|`f1ORyaJ@j$6x58NoHW$drqNa;!DXs$*me*H>IOKEQFfJe=#nmKv zs{HDloXMeZ<>cr5cVme7W6`Tb$TjeoR9q)PBkKfO2{?``WYk_s_%1+#N~_eR>Bj#y zL~JL~0wACW-AMA*u+H0)s@sMAoxO^$+Fz>Zn$(ke^Fk>#$I~zJA**O%@@;}TbjKyD 
z2x&EEwY3>XuYG#Ek8hTTav8G-EI@a}6YrXT1!nCzW=2T%`O1;qLA?q$y~>R;(lSVRpCsBexBRbHk5|tio5l(72B? zTBO7_4>WCe{NQEON~5VLQw!sg8Juq6{Oa&SN;!O`uC%2#&Z$OCRR~5^M`{}y;2Qm{ z0o%G;cH%4T!tvs8J`?bmt?X3(j=rPzTtc0HzOa^zx7G9=SYP(6=UCG_Rv*0`G?=$V7HFW3XG8w%rD@FR@v#_?CCoyTPHx zrRB+m2~Xx)nO7>M$+Fbx*q1@U9#m-%30LY#>E^J8m!y|N_7wSszL;@m{E5|yRJOCT zU`(ccncO!JeP@QGjIp_ba%|?X!}Nc>rytLu__c6&!gsqY6j0)S!X#J;f9#9GtvetmiS6|bvD zcz#ONSzIVBSUSY!R1fz1q5CDFBBhIdN?aH*xvO8GZXql=*ntA)iM&+jZqG+`V}J2U zDTIyECDtaGKg>ZJ@lS*U*%j0WT^4$ zN)w4Bk~hOiXhR|PQf2l-`E&UT#kCax+`huIk}|Y7w5bJTmaj2gvwKgIJp2=qIE}j! z^rJu)n`VdOZHu!yf0rX zd^osR`cOdXTVm^hmV_5@<}bdI`J&?dX%$=ij)Qk?f7(c1TWfV`P!J?4d)8@D1A8X^ z&iT=6{Z&Yb-Tq~-v59q5czBdQ@E72A?F^EeCIkoO&}zScyBgXNf%|EYwt&uBt8|Da z`AEs)!P8f#w5OPau0ID>|IgyI;F!H{&lSU>)gO=k4`VGIj=|Z>O>TV@dG?5a_S&Vg z8<`HY`H=6>$*XE7>9S#&x!hU)j^qGJ-W%{q_)v+{;maX5K=BZp=UaL)r-&+l=37hR zk**4@3r11erhO%=XY(j>SYW`zPB=+de5;8C-Yz_|YtAFHEs8-DGS0pNDC-W|NZmR> zBytnixaa`;V?PErFF1-b4AM(a$V*0-24A3Uj3&R{NyyK;P3^?hgVGCR`7yvOf-MVO z^4sWe{OPhxKfy^;B%3xG5U@;U{7j>m&xT6%KzsiApJ7%!MdBFY17_-LE5BK({H3Vd zVu8cYca0n%ztBj?sRVn5uF{TX+VIK}Ag7RPiO@W5o7{ji^%$Q`$M2pTd8Mo5mJbBM-?$M7Ack0}$ojSPH2E`!;B{pi1lJ6&rj*|^s$vN^c zACoZ*;pJYOk{?>1h+6xNCutI>@&cbrZbqNv*u7THF4<4srEsYPn|c1! 
zTi3#UA;tViQ=|*#eR)mmLd;V27)Ze3r7B7t0g9sqO-bE#H5=HG(~*2L1tK(*q{x~5 zG&~OZ^y&}rYDRL0xnOS65o1n238Qp5rrp!tN2`jlMWA~bhHs>i+Md6o7$a|MAQ3c#AV$kM*Ine|79oI|r40*CyT}Cs4Hgsz|c5z3Ovz!a`J9d`CjFUJ+ZqXq&W(D%?y^k7q^qIDLQ>sMRPVJH7Cz1 zSS04USLN*(oPmX>8z!*jI41v)&-ew^5~0ri<~mu#wC3{#&&KXG_bamn*4tgX?f$Mp ztw|^;FN1fcyL?_h#CcE>iM!Z0F8omw;x$Z>Y?W;CO5H zMP>Rc#TfolA$zA*v%3;u+cBiGb=k{6)UB%yocP$E3@NVHjC7YnOd>%|OCm;1roI88 zYnfFutiCs*(`<8nE|PJCTsj<+=n`y-diWD4L~dCi1a@V~Q|ZSwufDi>f9Z?^-(tl0 z`sG{Re1FG%R)!smv<_64vNZ#HiuG zZqH+Ln!ekTnN~6@qoM4loE&|ts2kV51sPY~^}TmGBHc{p#zODZ_f-x=-yFRsbdx$i zAgVQ!d@kQUA|H(H^jS7ISHeZ9fiOuKe-N^Bi`Vmj3pLhof1WOtqty|n zeVle{K#6LLNc#=ANl4c$Bw|3P(3`C{(XjQ|Eiaqq{8Vm+=UIK`e{{?{IoM=nm?(B3 z8AExCe|}puQ`aGr0#V(iUtR_~1Ai!uk7l(WTPMF2G_-|XpKw{HzU8Zz!Sf$}^Sj9e zP`y{kkGCpp$k9%jgxM&o_6JUW#->4S#{a4KU(5;VwB(Et3XLXP%47*Re;Ha}#aSjM zKoL<9rC5sm*Z`LD(M?g}!oEQNQww3IrOth{C;7uS&AkwkM^bqT%903T*HS`T!P>;8=fS^ z0pxGw#fKka;^(SZeBLkfe=OKWk0N77Tb#g(%|Je2(VRj|gx@A<-j_-zIE2LhVEatkml=-p#ZhrDL`GHJKgM$Q zNei!#n8uH!_y<7UI=!wtl)s8Fjk|m+WB1;<_{h=Gy~LS{Kf(=8|1W1S5eXK?lgt@E#2E+izOcE?wJ`e8Gdaq#xqo^bs+-D`Z75tPTWfKU1bN}0r=t;3!jw1NyM5=Hn z>kU$BTRnDLR{>H=l6K2@Byb)|eXQh^doj5}GvLZeKWpQbTv+J?GA|wU0b*BS9UXeA z*lqNG8)_WC1|vnG{eWsfCKt6wL(5MV_89xlqezt%tmIU)b%rS1oY#G~5EOH8)9U*; zW!x+h&gbEB!?m4O0cL1IAQsD8_zS+yPjCFFVB>hZmSY}#61fp)qcc_L-qwdd-^cwt z78G%+!+7EX`M)kg=CB4XKkWHORAyT2+HG0(LZ=F-{bN}y$WG|rp#f$KeV4E_n9&r3 z&V$vhBF(H1nCI3X%FDfMg&@6M7^n3GrMj3=OJ6%8)>(@}P(L28p328P)E)}Aox z-^bwDu(t2)9NHTxP5cJ@0iFnti!3WPNJlGyL5ur%%qx&;5FAR2UKV@P!*aH7!T~Gg zj6m{roCFDPv2jEG4N+aH11GLQZhG(263sq7_X<1_+YU- z5gGS}FflBjp8jDVz-^p01{3sdDQR0GsD+7x!OkRULkA=uz6l1|uky!T0gk)sN6aRX zG#RzE;91iKVVXa^s)OhL_EMC)iM>;C2k=ZPH2mKgL)Y_aX;W}t@j8eBxfZktcyx^kxrdI~KW3IU(UJJ{NW{qVtFYIR-I?Du!NY zfBtB1W=h7h#S{(n_$Wz0s|X zNUbQjY!-1RZQBdWC<)sFYyQoIsjb8L+rr)cs{Wa=)aFQ>L6_D=JtlH*H0Jdk2{85W z#YX2*7M7TZ;q7rV2mG0!G(|WeNu0PD`u%gDzX?i^(iDEO8KeRiALj zWcHS3D~6%NT?c+YPLD{--WvvvPz`vvKf!6tm%W?(m4{$awBX7XY>u$I1Ey04NDH_0 zEFVP(_{@41MM(SMd7y9An7nK9Lb$;yc2NG(T!#ep1sw8gVtHTdBxE+7nAGOW%SWo0 
zckhmK_P5Q@-QGe!^Gb&6E~07V~aVg}AU10q8l-!ez!uyH)zdihTEp9$4 zzYTwpS85wUNc|!G#*Jpa@=YaoL*+o3f#&s=APx{?kyLm}Fq{=UhojJ}#Nx1COMqfw z)v=p%XyQUbp~BqN4<3GkkviT2^`TJEgM0hx*9EUd1A~TcC+-y)Vn#FLeM>d?d>L8p z5p!3TEF(x=q6(yXY|q!>e#{PZA%iEy5hDc~kcuOnxb`K4WO75{`BMwBj^kLs(bYsgMpF zK~!~g#tg5Tb7z6h!Ho{*^y*6+WWh6Os2kbqXkSe8-Y#)b8aq%;=J0)O{E9&J*KIR* zeZ4(dpn2=S-Jzr9-Ex-PcLx5HcdW@^X1l$01CUDJ)Ypakz|5Hz=lGI{eSm?y+AcGt z^ZGb@NjblA-x%z{XajnLAQI944CMsB8T!+)4K!98%p5a~2ArIb5UgXOs)Jjzx- z!YNHYZf>zchT2wR@@x(z3zQ*0oTMF)pXgZW9luwBn<^SD;y%>>SpQ2iBqz951H@c% zbo3#7hih3bqmEQKyPdUJ0E`OXG`=Bz?0b75)?{Y3x|OlV!N8OiGj@Ykvg z8~2_1u>K48JKR;iA46%SHSA4)O8iEXm1&SM$bDTu6GxP4{>ImOV zdVz;ihmA^d`w3vkg4iB^|GI1R$@lq3UV5Yhn#^i@@(J%&k%K>3&w=I5u5-83Q{y`& z{U;$6CmzyWtSfa?W1W<_?GBSKy&yZV!uOTsZ?AhMnslU4rTpb*sMHo3S~qn0QAj7w z(@cj-2uzAGJ`_#$@q!_v6c@5P-rRkwa?3~Q*7yYa?fQ2xmq+GxmoiGx!`+7{D8FC(Rb zim!*e(#J!^f^{jr+2h4`l}caT4Zb!19fIPn=xe2y{Z0&3$Q2dMfXziX-^2`h5;|0w zztxiz4&Im8sGXV9cYVzgE~?*|n5va-dKUxp_41wRd$$-oF}nNHF>df~@3ck5f!I6( z?Gp!{bZa0q#e6xr!aDTA7cO`F{ucN%4Jqn#06;QFbU~*jzV$p=%;pe}svXuR6qfpJ zjsfV!pE=y*X-J^CdgpHoo_UtXzO>ftZA=mjKsrim9ynE#qD;z0BS`0s=+XxDlFg?> zr)t_JpckrM-21*dI4Kzg*5E^nYX~$E0_1`0jIQsgd->h%1;T!85z|G>8}}*rIepvT zy?dUBJ#E!BjkvBD82ga)!7p>~)0R`gtS%8>THI6Qnq#Q_eed81nuI%PuoyJd-WPxJ zXTo6mvM;sx)0BrvgVX5%UMc(^OTct^`#kTyRl`min|ZJaB8U|}d>j2IaNs3qBPYH9 z2x1B}<;>Kwx@p!VeCAqVYTF^^aG1+sGJ@3;!%XfEcmIY;@u1@2zx240tzBK+s1 zqVPg4CZWB~|EeoV_RsfVO!UQo#OUs=ldp*BUao#nAAMoiQTm*`TjR?0@M|sn;v3z| zZY<9!guws!h}HfHD}zQ)d}+%KjvYB_X5!YW$G{x7jK8f|lHJ50K_y`VKS2rkUl+J{TvQf!#{3f!nm>t3yNH(cOU zD$Iai ztL?h_4^5R%k#gPC*p}djiG0#F>2H0;6b*n_yS1hsuwo{$`qFzplCQWyjD=1|RS5Cy z>^8P&bQRyU?Y%W`1oGg%^22#aZp@^MqW$QzMT0q4P$)L2#(Dbeplb-$UU`Cb@ec}DR ztoYcOlzsI00F!3TXLibG`^AhY9V%qRpTKrS4l8_F-jo;1K<+pqDkg zwb^Lcn{6T-1GR^ij0o_`=rKL#eyomEhmB7PMJEB_ln(feB*yowM4Q0CtNg1XcrE`5 za0E|R6aM(J&)3UHnvENg4gmg3snX1AYj%X$JjrDyNjPBJVcFBP(!SMA&-xWOdaBBW^P{ zB2^MeN1B9AjDU-+Q(YFp4c8m~{(HNQHRnwBqc%xMTU)#*;%U>FQ|CUdActCBcq)^2 z*LiTzTc~(32dopTvK&?hgXG}|5f})j$7Lg 
zGK}c7-wBMxx4u&4BTK#9kZ!G|*pH));{HYqLa#I%r1DLt2Tz8Pg-3OlO+UEd#|gXq z9()%6Z$mrjl47lb4$w*CSolK0_#IDFT_sdgl0pr@H?JlwZ(^t@FFINXVoNYTQcPW_ zfd~PXbThOcS0xTVl$H~|PWpXqtb2l@5Id~W1xHd#v9G^it_&+HPABg@{<2)y1lb() zUzYB0X*zSB|E=ny6PB++aX$>426@S>_AvS(Gcz{DW!(P4< zF?8Yi9Zk!)-tyOzMn;$>JADJg&N*(2TRQzzljvnZe4EG7ney+FZ+eyU7)C5NPq!y1 z{ZuG9I6*wtF;jtDB0FM8AF+E|tHt56IvM!N(;sk~2i#fG4CP8A-ted&jGTVC@kv)C&rfLdmg4c_NR2m8G zDt!cqTid4+=zGtAzXCva)AiR-riO0qS&G@>Zxg~`#_r>;evi&GmEZ8S8q=m=!}HC( z>f$pPTq|^Y;`cQR=Mvg!(xJ+wbrW$_#jD!*gDD(W{!PY)!#DISnH@ z)-AL?Q_wcx=SdopSCNJ&+rWV29&)87($ss6u$z1DMhG+^;l9+N66W+2a|cB~_36`< zZa4B%kAVH-!#~UXZ`R7YPfA{?oWl`oFsjTJ-Jfq7n=c2{t7-CP!^IDD+4FTN3LZ^O zL$@h82X-g`7WD9RnEEPVZn@(DB?1@z(Qjt(@S0S9HujDAZ1N+caVFSTvmy3#9U&3} zk2AD+Y`7*#^@Wc^#iW3` zC{QGD@%6l}hw?~lw3Bf&v4$B-qsodRrA=fVj@_XmgA!<~`jTmNXFdEQ-D57ED!-*F z2wXr>f4sFy&Gthbc=C5-vU13PFIV0i>>j;uCCg+>$;y2Ev4}phamkt}#Y#ZT$^ z%nj-11$m52SazNHB`ctP;ek&3v)_#tSFS3v7B_O1UxHTmk7dnH>qDI#)17C?ETR9PB#*S zNdqm{+k1neJF{Xe_|W2PFZNnd$i7QB@i;)U{k#v~*Hib{`l~<|g?n1o>V~ug5{FjY zp~~CkTyK^ffkcqFT7K6%E{fMH<&C9RI&R*(2(L3I8!0}d0)@F!5Bx)ZT0W{m10+rb z0jlLqUc@&Dl9tGnx0%+?-a9&zbqo%WvQ$Vn?O|#psxwtr#ZAT1!hw3#QkoS*t@K3w zXgk^N{i&(#H+;VIo;s;C0why9ZsaO*_rlM6fHXQeBx@N;V?B6g+ifatYbrDP+O?f> zYYPi`u8rMDm)b^8DgRxs2n!qfpaQYQg0*6nu^#G2M2()0HC%4j>MbVhUSo*AIU3}jjBN*rr(Tvafj-MAJ3(-t0r4Oh9@hDo|KQ(I)z>e7M&aRIgVj*U3zdqkziDDs`#fD`8GjkAF6SY5 zJOQ7yTSz^X)H?M1V20Yhp7PG%Zx?@i!n+<0xxPIj!o>YzNY))^sGWhvlQ&%Gkz>Tf zc4K+b6X#;LtRW!b=or|exBL<3)ls#Ux0|kP(7RTWt@oL6H>d^`2No9fj%wTZ$Ypm^ zfs?e&%TZ5$i(twKy${Dbl!x9Hv$zGdbut(wzJ#=Xh-b?uxci!Id&Ki1ib8 zJ#z3AX-|LaE%wOX{Rc6&h(S?7oVdID`cNKv{0Du$)0cg zh_VO?3ef3$h1zG0J#GAR%Qr16z4_H)>zP0TT@Q`-gfx>M^+R?^PYyhZU`mcmEzv)w zyh#Ry%-_?unc+kvT1e#ljzXviN!CfdYiORz4mT~Z_I}lwp~#%#v8Zd_bmh0fib?t> zrYx(q3|uM>alTzX9m4wD1x$HpOVR^vCT5kf(#!E-Pa0=VNd9OaPKb~6LJ`wv7@8L0 zO$SaCt0aVz8&^hY7EM<{gy5fVCu}?=bT?S})ReUI3m)i41G-pUw7%+0X^oy-6$d&0 zUKmznvV!NF?mgpCDTNK3KK)xqCUD2TQ~}$KXng+6|x1Xacz1=;@%KvkGRU-`UG0{g9oPZXf^iCrKRTuS?DG)qZ`jyy`?x 
zAtq;U(Ecko?IM1?24Ddt{z?e4UwtVf{+Q1xakqPY{Mn~=hF3odZ9S1sQ&yJvZK%R# zRUSO!Kaop|lAAkxQg{p3VxJ$K#r>B5! zJ?Egu@7JR}=*E2}S~^$N&tz3Uw0J&H;*^tn6*2PFJvrs8vu`k;G;&^$+9cdRQ_ChL z-);mG;gFAu5TAkR*D9k;hfAiJ4x&$y4Ee4UoeihtEG0-f7@{U&4TN50DAP&phUBwh zs-&?JeXpqi%xevmyi4Yt)4#38<~R|h!JPH@mO~Seea=Fw=wma?e{ro&%DEwe37z&> zqM*<)SNF!sUti5gk7jHWAUX5nXItHEGU;4Tm0tF+QwHPIaE?XCtl88@FJ9(pF-0=L zakd4+8~BZOu2=9eda!dgw&F|9wU^Tv)5lHc>FziIMEy3Vgt{$j&bdVY7~N=zB(Ybm z9Yn`dw-p-vkqE#q?5CiCTE^HO(U7l7!}d1Rel|!{YY1gej800Sw6c=blPaSnfnfKu z)OvQ~&g7jrv0oI%;OP+d)z`mEaRWNKr!TXa9@nE(AtU3Qu784JWIYj-&$!Va3j}3o z02|%mT1=rgqrS*>D%cO+KKRi?U%pzRL9SdsH6GgeMd^>PflddW;G^V!^n5(;%@OQ| zg}`H>PX3W=8Yl2ryWlSG=dsA-J9155PYA}FVZD7my!1+`lB3bFPFI#jCbVp;dVKCn z6xYi`$y0kxTyx}1Qic}20`zzH+c8CLo`*2RY12KoUzHpmyvg;~9m+U1=S(W|Dkp87 zat1Ye3~Jj`aLStWu8t7VQ`>S+bkB<1gYkt{k#eLga*@Y!UiLqfB}N0F@o3y2jWleo zAca$)1vSlko~Gs^Y_Yg$SbRgNu=`n~gYVTE0tqt#P>(;j)Ko=UkuJ%cw6N`3y;DOL@|1e( z?D|IE4@*(LW?fbn^+Z`h4SI@o$CPT*&Vu0uHH-H)r3>q>Da{{x{f!^}(TmXBP0@ui zsA9*HN*Vp-vOe1n?x|at8c?QgjyK@neuH0WIKuKTUn)8w-TrU}@V|84juf>ee+n5FmhZKf2Uhpi<1GEl?V#M?73 zt8gH^kYois7XC)2$0^h34c0j1VBwJcZt8Rjuc0~b#WB*M{^PCVKjM22>>(`1;Kb8RfXh72OrHybGsz4pk)&#zL2l7mklnWcTfgAZ z!WR%zJd!PcS9jPI`aNKEP!vE$zpyINg1N#(CKQ<=fRF}L`4qad{2#7LHdPAv#YjYm z$Ds##{@4e0G2j9L9y~C2@z@D-aJK8g{QpcX@*bd+2&t|(bra&t=^4zM(c94 z2A+YI*TpKXqeQq4Qg89|v07EpR?|zvTEYKXC7^N05M(u>%Bm=hu9EaFjr5%Od^oIE zQKYH;>NBZRJwRXSaVwnW9+LxU4;FKOCOh$0A3E1M#Z_(%DGE#IA$Etq`%jN9cQ0+> z;|lIT%z^UjfOTDWzv`I5)PEsdZ?TzOU#R!L4Oxxp$);L0a!UkX6`-BSOm=Exo z7eq$a$${rnWVokAFmZ9MKXs9B+tkpRB7Z;UvQ~5#$#cO$j3Ea=Qz8*j{ewJYBTKk_NlmbFw82GE+zls zp2N*e4#;86#r--7KnIS(m02rrNC$)%TZ7XC(LGBXk|yN~F3rFR(-=7tFLat0iRXTx z4z1+oO88@gm14bzbIWx&JtlP+hn&?st6PXW2qz)~b3f9gmEd4Zer$321oFzg`Z?@s zEeqsS7LZi)++;_9|CHSAn`g-)0!JPI^v$;;O`zzoSGXPBl*18oc@ScE%dv(miBsO1 z^JDm3K#XNPvxdgfxXdg*L~}7k>G#hzvrltkKGGW$Xd=FjC=I_O>UG&H_fA$sC1}<7 zR`pP)@t_vhUmxp}2W2fuQ6;eK&_NdIxsaKtF=%eR?ZveM$sFI^9yES+8RpP33yv@h zTyZAdRW*T$<$`u@S;UiBEza6|uLGM-f}NavMSSn2Y_gdzrF_-74&MlF7)xAvG0uUD 
zix}er>*$syUVhIyngV|DH5^$i*dAGNdq)Ln8nf8YWzD6L9=25r&v9`g~eQ1ROrP4=vVygMo8*_Mb8W8Yw1H z^o*KX5cxO9?zy~aqj}Eog`^BUJur%_n!J4ECzFXCJ?A^pL#ZZD|O+3_*Cv!X~z z08PC&{yf5+xek*lnrUdutm3fmfO4=kViT*CX)m#e2_w3(L1+49|+MD?~_+@RA zK0^M@n?WH+HR%mm64@UV;1o<4Vv}pM6MH%J5WXFzl)dbgprL!&jI%{YHreuul<_tC z5}TY{v8O!8WM=b)6P?9QU%B!?wI1yQ&8LR%yykitDIvhV#&>XKDckqwG=_4d$Vp

_47fo4@Sy z>cRSwU8zb#N^4adC~NZaHCErvfs>q290^+Qz8qXPsewU`>} zff(%j*sdY+)|shohGB-r!8v+K9Zqfao-UrgNp{YF>~-VEC_*)E@B_W#L5J5Uc_{e! z4_?u9$W}~bRSc;%ADp0;QmT~liGRs|b!mQY`S>5@xH%d$)!J6>IZSaC8MJ2^59e<^>vB?*>`&>kny^mo#KEXI4`KOg$weab+&!p!|f zwz#Ddi-i(PHrdN-2mb>lj)4Eb9e>GiK&cdvICtlB?|_%knt zOo6JjwYQuU=BY19leXE$5cr*u)7ro(ZmuT+*pcDx0tQ@gk3goaWlxhqmXHWqvM>IM zD9uCIAJ&*sI~2axW$wXvOiT>!VDR%S88a>#I%nkY~ulQL&qni?A$>N@(msI(k#Qn~`oO7P=PeIyfKKW<}a=`VH3 z3435@C4JQvL|Fudc;#p6V*d@mCdkleP}VPxX;87t!G=lBwK}!6WfZFZbUmDJgI#vR zor(6pM=!R?Q_VQ|X&KED*Nqs>iLPlI;e$cpC1zMCDM46mYE28)_ zv=;YI$@RoTFTuN9SUW~>pw&(X&6wZO#sI)w?4k2qsUoBslw|ve!D^xfpqmJdF=Lkc zldtjQg_Vbrq~=l#oWW-lL>ezfO_Xw>5!M;*0EbE8oQfvh0gROa(tkXaB8cp+7As>*ft)k(Ex z$Z@4Hdeh4&;4t1~N(5TxNh$&@`Ab;$^s4sQXsjUcl7$cHc?$Ax`Qa{GPA5ted&mBG zKA%*4EBKX(fuQzrV~1BcnWq}xc7&r|y_q~3G<<_&vpwR%m0o>zgl)*j^t)}Uac^Cx zmj2HluHzQ@mztQj7+p=5okJ=w_AT_OW}a(l&@~6^EiA%Qc?*k1Gb1^NBu71hXRgV^ zK4aEfuM73sYe%;XRQ331!C342db7g>joZLF{p^8(oYRr{4SM90q zL4k&;A<2^8@5BEL@h}_Lll#e}w z9S18N-}XiWE#bx0>+IzLv13j{KjQv{%qLA}-|L&;c0cTrCR@K@Y8B6*jc>>P8}nrv zk0gm0=J{bL)#dj!FLrF1|49&qn8Sv78zEhfP}7`f2+$7?wz23vopZ!e%jQIc(@?N8 zx@7ERyA4ZU`Q(+8C+ujKJC{4SIt!;Y*9xyb5j@K;t&PFfmgy~)qEKV#EcjiyHtq{@ zvb*)vFXV3dzQXDUCo5!?Zx{7^1yhpvPv!P`mQiWQ=a4E$hs<<_(%u}^*pI*jEd*hrIa~@8$u%<^HE9R;<%Z>NUf<+`aLEx1N65WdZlGQZY zcof()U#lfAedJ4?B3o^PuX*MUcA4#`9G`%_6?)rW%}nm>l=eEi*Ena!wOzSt5HAb3 zJ&Fd9h4?#)kMkq5k!&ZqmV9Avvo-gHh|wAeCWrQE`>DfZIBu+2&oKt2jU7eeux|ie4ddUTg~PfI_R_`RIS%dJoWFpS$ce1Y+P*CZpJtE9P4!7v;^iD=bCio z@4Vn&xdsV&{uT}c8#(Br9Tve|kcVcyA-`f!eP#OZ$}6+h+|`%G;j17{5qg^>UvR%@ zqjN8ba?h4NF=6qdB}n8+Gx`kyCt+=Srg+>iX|q*~r8r!C!gE(de8ke*k7exP)PS(I zy3VV}23a&EqdP`{RS~RZKa_Pn#ZW!69R_`NeT{(M1tBZ`zEK_=X0~ux zHc9PT5KF?CQ+er1X%R232Fb|81(#1>qkD#7%<^Rb%Y+wTn;(Cn<|h(G z3lUjEDTW#uma6!&!Fz)oqzEP27mSD{*pa{DP6H!O^xX$k*1odr!Zvy1KMmAnJeEwY zW9ocZXE0u-b=tw9SSaM}>Qq4#^K*bkHA3xbiOt?$P&jI>Ruj3XvmwanvtXeYbz-#X z^mrvCHJhpu|1w=Le{cA9W9?HEV$C2vh#%7X=eJMMXR0gxsoUbncYslQj+5*=Jq$Kj zm3s7Me6OJ-8^`mPH{n_QnFfP6*T?-*6ni4a>g00RwzMwUxn8)wWjHexmHonA2_v%t 
zHy=q+Ok5@d5SH>)hQX+}GD@js%&@|jEJk#?Xl_nd&ecOps6e(HBU@p5^a zrH;^iK82^T1;F(;e2-%{i>`O;?o8S{_NrPz86VZWT+qKnUBuQVq}Xb2+vWzyFNVXm zj>jUr9w1XSAUMn|rroFVP54VOEzPH`8Xc=UhO$F{OH$Ba@t7cEjaA=I7M!sN#`9$6 z3%!;hR9%JVdm;-6KcFjEGKIZ_Q&xn3-}OauZ!q|jm?r5YV8t<`VnRGZ;kYny_-hQx z1-2rOH2A#u9%KJqeH^D^2i`pX!_+~1)Prm9r{h*m8ByMTh5^PDv&G{_U)$U*kKuI< z3J%Eyvc=Ciib*ro)iE!ankooNExfm(JmSv1LBG~?xc}JTSOEHyL?Y`7M5a5nI~MFS z&cy{RHi%K!c!MVIIt{7r-b5Z%(oE}&XgIRo}u!++RVfTl>DmoY!(KduiO^ak3P^1$cZB&HDdg>Fwj0 z-v9q`ojR#^$|*-G7weoZoDvdpx$Sg1l;Ww^c&9S|t?Pj)df5`zfk;Ri#T2X8^t}!TV{u~;^+%h z(-9uXuUV*l`OVc}QvJAjGc8`8t~#VNUoB2-(JzqTuMW*LHPr(WFgq7lW8tAkaiZ(P zB9M6t@1?0wtC81^xc7_#V8RGPFMsD<9M zp_ma5U2M{`^J=-F^TRG^wZdTGZXQMb!IzTj$F4=iY5u5VA7i0!U}zj{4`uvT5c&gRRjk>OW-$ z(Adjgr<8p;#ER=Q!d#{F15(hWf(o!(zP-Md?BVi6`%~nYa>b9~CohFPE`}z{gY= zZP%#_@etG}m}~zvo3eDO@}wJQ(Qj$PN+q0z`0lCbq=dvpccfgweOk0ui`aMA8JD^J zW$y`kv_U{8M7Y6}tFO+gVPMy;xMBiNpZb|GGVcSc$xVrw&bea{>^xRG{2bYuRSeVL zq0^pjU`&(|Y=E(RehG94;rnR^qD)=+=Nlv51O%vy6CQL8w`gr$3?tl)V(XW5Tg$F< z*YEmAo?JNn{AKVIbyUFM_Z+JDOZ96csg!&w>eK?qwR!}!O6QCynTwR$^)xvy9X=Dn zES|GQ_Geq4IC??-x$R3!{o+4xXXu269v*67<21(3{Bxxy`n334>M7}fP)Je`2L3Cc6q&pOJ1;J1Z$vp^03I@h(>ASul-&Q^kim0VtV@y^k5DE-ioF7h<0H;1-iU0V$Qj3fPTWim zsqXL%3BCUPl2QdG-~Ely$`1f%9ve95=k+Z`OFTDZeRlS`hF71_tp?(f1+)ocl824v zT_g9JQ5Ng~Y=Co;6q%=yH)IR0I#{kf?W_6L1e5p8{Uxloho`7|9`hZqKg3CvvD=h8`ODSt zEMucZHz#F^7@ZCcE1k`tuTtHVzX*)>J3}6ac{p%=PIB2M$O~SUk^AqgMShQ|4LT4! 
z;#@DLZS&i}Y9yR$IUOKs5>@VZ)>4#sqpm4=;Od*x*a6(#`3p)FN!!tk!`2}w`}%f` zw>p$Zs_V6B%#*wn*CA(>HcwB22nwf3Ajsp#aZM6Zc;?ly=&>cY@;@|Jn>^#IBq z77c=utXCy8aOHi;mNSY1{VF z&YRzO$R=sO)XIJ<6{mgCWNa={?w8`c6 znQT-k?(1oN&vL)nW!m4va(A=32K>IYZflU2g%a|-V5_}HzC2)2&H?^h+8og)u6QU7 zISy-$SqIlXZ4FxO^f~((QMfWB=!;u+Pbhd$aK)^|)q*mIzUiU^L!Bp872b4rl*KGw z(qv>%H^fWnnOuT)v(|;#jaD>)3jSA7HP`FLmP-d^|44Vm#&SUI!-z8DL>+!^4bL3H z8MP|OnY5SFnQ_p++RTv=g;K>APXEcOPVeZ0$U=X2jQiZ zAB(gSXw0Z5D0L+EmjxlI5iiZscJ;#VG&CJj0l+!mU&2u(v=BN~{+e!y7Ssd4w9Qs4 zdIq=u4)$Cj*e{=}qe*X-M9Zyj<(c#x8oIuDC>%~hEkR|MqDbNg0#$L^Ql}WEJ@_(B zz5VpM-QwQ+c^t@^w&nVw$yelf)VZmw*coT^lHgeuvcod&ipv*)cbVW`WRwGmw_X3q zb2<-^yECCxVAykBe{NqY)Ww<8?WEj%&);kRyq4pH)IfpZvWOMeKp`lzD*J4`?8n6JOjb>t_o{L`dzn|r?u8zZthc2gm@ zO8WF@R!9rsi{_eG>fEZos_)dZg7pyc__PwMI?CbQanYM^A3V8_m&)0^hZ}n7ee?l` zx;g(3!C~=Y+M+z1n~vK}8)-%y4?2K2ZG{Ft4^WD>UNL^pdog=>zSQ}StTu{O;7^IY zn(H>oYp9H}&OCCl=3f<@-71bT<5?-t>g>VAhOt5TTi;h;0Fq=l{omVZS>Bu$(q(bE zV@^`vHH)LG|9r=f6hfyIH@;U_d(-~aK2mMZ_^nxYK={IS^enr7*C$Z9&PF!}4GU_H zMf#a=mQempx;V#4V)4a|oZ;b%120a#yh?v=zw=R1S>3Qq+h(kH#*ua_Hayhjj!9_! 
zh%tcPNO0t>ng;pgc|+bBQ*(`aCP!KKgFG!R?Gj$}*1BIV%d- z3GqTm@@{|q7z*o)NGE26^_vny+%}p~1AcW4IZ^qm-m(ia3uuxRO{6g3_8Iz68dR5_ zc8J5o__YcN=(j-$y6#RM%9bvo%%{Oe9r=6!A1s=AgO^OYN}nSncFUKH4A zx+Py|AeH;<-R~K&Wux9pIILf4pWl<-%a8iB8tY+Pw?f&_Lr!t?A5;V$Nm0Vo$FKt| zOMOc*dqa@3EGLOz&Ttg2C`N`g@9Qh*yN>&MOPDWnc2K}xW67+?74IkazW5_FZkz)) zjPPRi%KV&p$#97%CjzpPRWh-VnU`}9P0k6gs?*rvq@FbU+IR|wqHp^MaBZTXYC%J#!II1uA2wj1qw9U5-69)@Z#$j%ea2Aav8)SaM1 zB9ru_bEazN-yLT>J$niRm{&My9p8tr&1t2JzkRKjIaBZx-)}ZLOyojX6PJG8tpgZt zs$Lo#suRd1>pI1%V=$$X1W^Yu{S zzYonN-@39-{Tp$#TDjeXPia$sE2$SJ4O6?}nywaOL2(8ur-{45oB0fi;X0d(_u2Xm`B1VN zdb$2>I`Ai7EMp%WhaIB@Ojo&tERB#$+P_`!&eHcV9w|?z>FI`iu555Mz_@EZLAk*9 z3$(k&hCY0`YU#U4G&W?`J|no`!*6is7HKJc%GS9U?U)xhKh$W_G;Zw}v|rz}T3YHE`kwYb^7|%5h-RdO*h7NYCsn;@Af|+sp~T(4)W;SvLrR}- z94>qF{KAnNaiazES~>SJ0Hd~3Z7;jd6z_qBC_)VyCFf=^rkzd7{KDQR6@f+ns$9$t zIYiPy1Cn;dtALH)1xXn2x2Sh!`Sa&Kz`pzeKzxx3=ufrerCaeb`OEZ=G3I>}bFakq z7ec^h44sN0rRu$e-(Nx`usbzPzld$GC3j)MHj6yGkeNUfh1v-;{|5hwUH-WQB#OqM z<1ceL+_xcPXC7ba|B|{rnR?>hD=)uCqdwRVBcLg}j^f&Jr`(VAWt}MUYNXX)foF5B zeGff90G>I#9By&}@`^YyC5x0sETTuA{i`ypP7;}m{Mey@?q!wOe-SHi(xH(5FJa2X zqJGfT_EZnvDBwZJvT2^5fEL_I*ceau?8qSE!1kRqA5fiRA~}XbJ+$GRzkHQb;MH0a zFe`)1`WE(Z&YgyLfEA?3S=2&lp(2E=V(!|K2bO4@2_6?p7fw7xhod^o2i-elG=VZ{ zl_y~+{>T1p^>g8J28s_ii+vS*ytuls$6lfeg1JF;IL<9;5dxQHNW(=^KfW>|%)Ut& zB!30SMKn>k;YTSCPg<|q7&zCBBhjVeHBDhe=}^Z|=tNK~_xpM|;%lYE&GNHZF@Qh2 z;4tssMSAAmmjfOnrJNn?d2XCbfYT}=Nki-6AdL>u23g-Tj28&GQrayNR1GEl)h_$dZ`6ni9M! z3?!zT%2QNxW+=5LE*?fNQRVkSwkNl<{#Dr?{A@#JoQ^%=(fy0*LiT1Td|JEiuxz*D z)l(A|oo|lp(uity=DYWjcCZomCtRVbu2 z0&bm*QG~(^y%&@k2m12p=L$y`7@@w4BV!39=BDny7Od0b(-lD)zX@*zgJJ*`^zZy;*)%yoEu{pkgoZ63#VR>oif z*zk|iI_?VpL)6)>Nh{C}wX2s?TRCJ@C3e^aRo-8z(Rj@N)^_3iG(jZ$_E7 zCP!$0@jud)v`BhnwwAa%d2W8f49AnF_h8p+bmlL`2Z!! 
zX6vp!8ed@TF6M0USb+WdtnrhsYlHL$D;`7Ve3dV|`RR?;I)NG87tY;YLa?0LtNLty zS4@r~J6;Z*G1AFL{)kM!+jCv;Eh4FFno=Ddy_n-RgT6c4^dh@Sxz( zRz2Mvg(l23W}Qoxj)3#a*95UZt^?p;$fzGAGcf4WX+;mm%DAQG5p#_l%O$$ksN z?N<58C9RmELnux~ZdZM{mUeI=`jlBIdVgJ0vg?c8d1^Q9{%4)gMunF4swT;izgP~` zj)r|w49l*|gw!dt;?%OO$;_YdmO*ng-i40{mSZWODen21Ga2DLFu5&7hbU=@9g|E$ z6R2LjJFE$kbW6wkMK@t@5o&5uUy%f&#BgqXXw)_7WCLmAX|p0=E>@H9>@8Pq_1jrB zm>**8zGZ*ReN!5#ftMVYEza84Gu@lg+jky%vH!Q0l;#IFjMA^H6q)43 zT+iE`x;dG|$9V~i6I%j;$|PaP)~}*%hDHxPxD3>dmX8zo-1txXE!z&tkQum}0egNB zxH|UIY@bH0>C0E6&AHe;d<*=q4_fR4@_AwY&19OQ!1w069>1rEAFlr}B^Y|Z<9b!- z?{Ea>`0Y=q(^WNseIf5nEO*_7UNHU3?=(LE8FF- zC`s=8j&pD4#VP`L&h51#cjI+WbHDkdL`onk9j1kt3=qQ(CGQ9h5@9Ut5ASN^^*|Ic zQi~28|BA7O%f2@P^^Tp|SG?}g3Idu48RH-~hWd5aDQIXcgS|G$tNJo2KAR7tQ9_jU#r4x0_3RmG{v%9Sljs z5j>3gqAi~LBX!ic@jk8E?dN+SXnQ&gp>XpyLGfWiaY{F?&UkT}P1X3UwnQldTKNA_gML*f9?>~D^~RBhQVu%`6EVY#p3=C5jL0L7b~Hr zE8VBg;|^@>4vvT}dhW{X*9JPo%k(OxSJM9e<9)4)Src$2Z>CXv>^3a>UDXq;AUmdj zE;v^+=WD^^EXYs%hCHTbYRA#C>|t_kwD+6mmyQ_NX9g_4#heaeGk!vA1{wykkA4bz z0GM%St%G1p(4@$~KS7^m7fP9LgrBdUdPMv{?AVdgOZ!1?$FC)m-g5!xT8u7h4`LfoBsXwNb_PsfMOnDbwCR3$d2~X5 z+RtNGo$Sl~qnUJ>lY=5`O;-`HYBah08;RZAoM@5A+6TEcMMUvAlsk>McbNBKhyQ#W z)SoX7zH9l3OSKiy{vi$W_5OGHGrvJ+uli#X=YL2~x@<{1;{I$hzy$X!62zXP8{I)` ziN&w4QfJV%#*6SdtsM!3-Syc&L=0Ajm@ z@#&eYRDMkQakKAEj|CSe4rNXU6;~LSuE7G-*$KALi!FJ^U^lni3T%7TVyfLp_%^Vq zTW~|Y2IE}RK7E!nJ)W*P;coxdlL~CaS+Gdl8u4RnCpdP#%%2pE(J3u~E#4v87 ze3!(OZ`m7b2eO{grfJ2FcAq$2JJ8O1A4UfUWX@&9!ppA<9IYqxpIm8ggxfk`P>8Bt zdH8hPIqR!_*>kV1g#fQE(4@F{7%TV8kZUOXOk#1C6!Q$fep!Q70#a^#>)ndq-KO3; z^f! 
zLH)7HJT^pG0naNBud(i%;@HL#(0C4)5U+a7Jyb@4LAd=58gWJDgMbBMk5oQ-sY zhu|@}E35lO3(=4(IGWG#*1mzK=YE5?_);5nF_t>%tim(mjPwFt{04H(OJfWF#czWN zip+fq)N&K0C>zD63e$0&MCSf1IK-{L&PG#2qVy_N<@+0{agl3c+Qu5`y2MU=!TiFT zAvpWX#I0F2UAU&B7r$>!&$|Ao|BXFr#MLMO9`N7Y55P)JXNMk1VKVSSBAtwOnQOS% z@P(q8m?Ad%;8AW-RL{CZ*&5Q;I088|lbUN@E|cr3Az2FCM`+kg(x{=aPvYS1q9mnf z#Yr@43z4R4KsA_$#AEkYN_qtLXZg;>v1sq0lF@*~$td+hmj~C%?VaiUP_@tRahtv} z;yuoYU5$Rpcrz;!9YjAjUHOX5Ly2yb_W|FB2|ZKW0=pWsj!|C|VnJ{tkW$CQ zT{(^Zth~N{hz?c1vw{D8m@^Qu?IvFF_Yy-FE*+16XJYnyzdMK75_yj&dnKXH#Bk(L zC~r0jzu7JQ7$cK+xx899ag^a)CvV1Qfy_ixm*uQ@=DH*zm~S*=+P_8XOep>DDy`hih$i0Gu)eF4lgV3!DPFW z6L+XP*H9`x@o7IVgx!fL8@#Xx*Ad92GUuBw&b$73aOttm1m-*(AsoqXe9?zCD<-tc z?tyTvOH;3nf}F0(_9ZE_lwQ#0b?#T3=rL)eaFOq#H&iK!Sp7iGULcfWVj-XJW*Izg zD3wrL%5&z3izAf{(M85@UgUBFR^)C?uWE9Tmtxsb=I#Uux1tNQc>F%bvQdff1zFW&-%ybcd2n>sI%g&;Gqi}&uoT=LP-X+Hp6#`<`b|n1YT}K8r?jWPag|v|XlK_9b-w4hhmsIBw z&AK^ci2sf+wp~{kNZlpqY5ZavRjN(oFREGnotT$eJBK<@NHu zuyX-87*S<8-YQ+3gcjUQ>5KEMzW&%IgOrn+ZePe2u;vF;7*cR{D97 zhb*CXA%CkQXS6%>;yQ|p!GahR%jeA4!rz}Ogca&M*JZDt)lE9I5h>kGb0F=DBF+kf z3bpZIh~40_D14{`xx&k26&&Eb@fY)gBIlc03NuHj9OY!<-p|z__ulR3SnK3# zSxEEHDn1pW0^1dLbD#nc{Eu=!@Oyq$N#6u$L)U6;J??S>sVi# zzG&Z+pFDQ1;a`=X0!$pXJ4Aav;eC^W$cqEYXM9E{*=EYErt)V~HRph*ldIEmhQ^bw zn>Mw9MhAee{ne8TWDiHA$itCAhA3Vt3=V!mH+cL(tig^hCD)a>D@KsPk^mr_x`pRx zpA#tYrM+SUiX9xU;McHGb$kDe)>1XA;|<^f2z2%}-rrL%>@Fa0Mf*NPor-T(IRy<)^KB4mcHhr_ObQOo(0;6;`IK|T4?&s`|a#^T%ZA`V;jm0WP-qp${IPFB}1`BBw>(y@lzyDr?A?$WzvE1FTa zSh*eVZm2K8D1Ss*&5<8T+NSzU*^~!nD=A8~EKo^sfqtp3{;m6zt`lN#yEVBe<{p>o zc!q`o0)DgN*J@vtvO^%3a)nMJy{m}B>n6lhKcA|{6iiocB4e-|l@T5&WFy*HH++rj*( z22K%u9xC!7>2V=4>$R(4WXcQNT2`&RNoW6v`j1Yk7sI6GIG$AbYR;{=18f=ZKm@{n zMq_T^^0E6l@M6TpnYR+FsaL`9fpa2!$wM9-fn$`%Ow1Mf>%4rv(92XD7UIDdZa!-6 z1G@?OYYybFFT@YnT0z?dRE8Y%jw}CtVMx4!)awR?E zW>hk3O;hBFqkR=Lv*O2Imp$}6)SYp*UKz_&@0~wyt-zF;$oC5lbRCu1j(OEV?WB58 zDk6BhypiNqlDZ*~u{;?)@QKt=OqT zUXNZ69dTj+Ai0^r*E$l|fIBQNH53#3Ux<-}xH@)R32| zvvvcc-=g=_nQ4}I*>PyM_RIMQJ2f9&rv`~1dq?1ooo@ZIG0tg-hZ+%AVelr)`-L-@ 
z*b+Unj`)PKpiT~-YjxP_deO7{NU}_Tp#ermqyB56{=MiI+;yCW;`j0tL*!z~2^jJ< z4dsZzDg4q+T~{R*)TctlgPk16>QeZrtyl_bHr5&1MM@5|YT~z!A0k}59us>bY$8HcpA8(iq%*xxtl7Udzs_ln={~Iv~cQ+QMl^yF+8|5qk^FZiOtCR;lj` zdY&H3H)W8|Td$cfO#5H!V2-&N+eJL?UKE8ByjKb;l7~Qcc&B{4JP&EIh>PT!P)@CAY1=y{TVyfP+F<2plmRwlrA^##Drm zi+sC3b(69$>Urt4_O$)O*zT0L_bc|z^Q1ynfE)FiW%sF4bA`K9Xr)b2<66jj{D0mf z?5jHTAfCyiF-jJ~YFOLO-Nfx=jooTUoTz8IFZEbLr4N)CgPI-yHag@=%@Y`KF7P}D zz6ff#LvKqhE|l8v3YtE&PPFQdH06D0oBr-3fipxrDdEW*PxmhG{m)(EmN77Ld_0%> zy)R7pTW;U;k$ph0xejL(^N2$S*vs1|t)L(?=@R6%@k$|wNn-GbeAOI2_vYIC46Jr3 zGN?AlXr+$qH|jj|#OD-z5%OLsQ!sd_6$BvaadSVHJ?-#)-xsdFY-G0Lf%|kgCYDVq zcH`r;d3zSL2Gc3Gf8`wc4{rEYsI$GQ*!G~7)t?WJN%%hCbCN?_ZPvv=l^A`(T=(Yp z*7RQ;?;=oMSVBQXH0f(Y$WE;zuYBB`@xyPJtjwD7(N;6oS)tykK7*^h{-NR@vg^xH z0ROxP4QzW^47pXjzBCKK%8U8PW$&C{y)$6`i#Vf#ck64szmb#YWEHU!-{cmW`_C62 zR<7_$yvW@OVxUhwUs(meP+`7JNm;03%0a*K(>T)PAF;Tk4LSN7atybn0&pGUXeGso zM*l}yDY+_EqrVRc|z(JJM!VW?h@h$~lH zVC*kyYXxc3aThzeb=tfrkJHCi3Z&#;M%Py1q^QK?zA!!Aw!u;v12o>(2f9Ue>2rZ7 z%MdZ5egLtzWX3<@Sdz3fuT7q0x$;TWt?o!DjJWOXY5aFRzIRGZwhldsvrXMK4mrRu z@_9@&R-oC$GjzxkE@P1VoE{Wpj@Ude%`E$;ON?dBL@z}DtFjxM=4O^3e)>4IT6>-- z(^W$Zn+5%spx~LUbZ(%!=&b|seh4nqTI?0x;GgC&rUH#V;kEiFYMT;0pxihi7 z&iZCwpB#<1UJa>1uuAs#tgXp~4`{uY(_QkNU&lCI8^?|69q2h>KT59qz#RM{U?Yyt zh6h~1G8aG&0cmH*dM@vKZvdB)U-gc6CEA5Xdx-gq%iy-nhN6d%SqqHyyPSF~F)?b5 zn|A{nk8q}d#eU>bt2Wv@d1#otp$~0cE;orTW=c6ixnKiHdT~=wd_35!zIgy6j-)Sd zK%Aa^|NBkJ-OI?>+fDw`JjX`a$Sd?WTq3S#t_9JZecnaLL5Dmy#Ode-FpZ6>n-zA; zs_S?PY$95DO~~rnA@QjfwyfBQXlw?*-$p%z5#^Tm-uGk;CL?Clo%Ve=21A`>lpL@; zM;l|^BQkuyv>22`>Hj%$G`lMXvm~TNfG8xf^jQsu(6Xr?m%j!}09IJ->GFyvMhVl1 zt)6ZVIyG1U@PI2(oGp87o(ZvslO?pjN6t-rsSM+|6EClE@u*(jU|tTmW&5@+TCmro z#8s0qN-U}E)h!DR&~&O9X07b%Dl=x7xWxTh3zAYQC6970GE3cLL}+3KZ{{7NgPtve z6AS^HE#8~K;H>MWl8w_12B2>#WCCyLS9ZnBR_*i`n&2;OKmH zq6CboKx0jOe>fQI@qfP0)Crr))h8Y8gRA8D8G?@v)uWt-q z4X}AjsG1vN)|D-dFsloU(;=E}sS1)vapyMmpOmU?wTW1DDZ)dxrpg=gsi{rddOPII zQP8rNG@8VdH*iFA8SU1K{wKapM8oGtD7R{i_BHg(($VG*iL0i&DfN(B;0VpO 
zS6_{$L9v;$?5b}qdkxPQDEdDm{6uNjnB?T1h!6hmSMhI45bEb&8(p+}sCZ!q&pZbE zYmT`gpsO;*$bphk?}B;8f$-B!H(zKV!o(@LVTIcJLF zM6Z#V0<+QsLRu=G5dEq4HYRg#G zudn@EOG1WaAEXq|YwN9LjFt-Q=2tf(OfpYWt|=`!QMKd)?zKE^%Uz!ApLX7?y9 z!Z(iv0lE2dnP>_VMZ4H|q->;owomq|71#!Kly9oPZk!KP#|f1foVu5}w%2#ZTpkG& z-h;f@bv^&3_@VA+L>3s}tCa7q0`l(Gi zd3k=CQr0x?v9*7!iQ%}V?6{gExb)5h)1wM; zV)yebJqn;H5bXAQN5bGeHz{vqsbZa%30?E9~;>>8k>g#${9Z&LL(FH@m!`pB{q zP2S?G!sb!y&oFb=dP~;3G}L~6;YW^9TFR;3T-5QX3|G1jrQTX|_Pnk9D<(U|l*QX% zG)I|U%i>)~3?N_FA4mKniU+nFQsgQYlVPX+zv$P;9UoSR+80HPYc~fxN8RhPpmB`N zk4i4xatSctKIw4vT7n{pa}kmEK0NM`0%0R+gELrpCtrqX6~^CVyH1fUwJ0yUTj|cvf~9? z#xhw$a`rKMF~T9(DppUscT75YR8G=<0y;;d&R5U_6bt^i58WdzRl99dc_$092b5a% zr33fqr17=2`Tljb1r@I)zcxgfJ~ARQnZuA|FvfX$Fy*URnr#LEO+aLhn7bR@7Zv#M zxAIhj2x5(eAvp4V!Kxe0urx2spFfTAO?2O`WLj|WAwr27UDUk&O`1=Zw-`}7hw)o1 z5-$4c_!HP0Y^J$_mNm7SSCOVhG-#jZTRNE5LDY3lbr@Y$Fwgh;-OCN?s}p68ZaDuM zaeSoIdiWZML%^Z{3Z{*1P&2zgSgvrB`Aa;E#my7i4+ zGK^@gc%9veEKh2?&bt*QC3mPMF=#y0+KxvX&#_%B{)H7Sxf_4^i2y$HA2~JsjrJu0d9x6u~QCxIBY97VcejU;>m^--FgY^0@xnd^pi z<}kzwqhNSBdCw#LT18btHvMVN>T91oaQyi>Z{T`ux zK2}I4z>7Vwzf$@qEd|0(qa%H2moHn<$gESPk5I4XBL->EyBhOBbq_RUBYW zhkj88&){}9kZUiJw&(h+mT3fehc=uz+di*^l$)(Q>157iG=&5hpr!)glEcWwBRG}B zkc2&d5CsC-6ZutBNm{sYDScyWK7?-<^39eeqdlS{Rw8DIKmX?s4p#3(v(gRLx@jRV zCsESL2@_^<9Z*K`$|ZH5Hf8^S3bw0fkTEz@6xi%^dIZ-W>_RMAdm)pfF z%T`oLY&gl{q`;ulfu<0qNeE8^wuKFqmpyH~VeC1*`} zwl%J^y5+MAzKjMuIVj~ra;?H+gR*3M<9)0M?ZqgSju_Yl%c&1}kV?{GG~4pmRt8x` z4sCAg|FN$kVb_yqw6g_x1ge8Mr=CpY|MjnmL|>c_SnLCL;QTi=XmopK1b7;WG|*h0`6Le-)QmhYk`Y%w($svsn3#z z4-@BvhX>rENL_mWb1fq4K^W#dPuNUYS6^ag$@5Lrcg_dHH(RYwDrvdBHP1adwroIS z?m=;&V@jVtJU&KCtxdD*%U%b@8m&gCV@PY2bZS;a%0FTD>~Q46whxds4{{+`!kz>JF@Y1Mch@@LudQi-yZKU50rY))O*#G-c~j}N3zDRF}< zSmfos&S=`<%lojjz+l{+%7Mo} z@-}u&Sk93H1wWP@I=fnHgRmZ9IRI1&ZR7IjmWjE#=N{bBHHIcjVggC7Qy^v{z92nnu1nT{RQ+7i!@3uwjWgY)&TjMZ8r(L9=i z5(BqxlNqnTW10DEIa>%xaNj+3U5~(0!~pf+o%65V;)sLr+^W9|9v6J7$ABxYTqLsb zG`@;~yU@+|-{0*%9){oGQypJdffIoA4Uq)eN(anZFMjR#u8p3RhMvTkvY3>M^2_zP 
zeR+8|)O__DSJDZsv?0bQZY$YTck`K)j_lQqKJWdWQ;jRZ1j#}oH8)FKU$3oDoK zraVolxLviho!r4QA!RO1T;S~!*6u8oE*G`cxBFv6YaiLG#*PCKOzpX6chfXZZP*e* ze?J`dHI=i^R__&Q;1Y7?i%;iLE+OTKF#TiXr`Z^_STzQfVfrd$u92-VUgiBYeJY)* zZ?z=n4G6N+B)(}vhg}9q*iuoGrH8Y|&z0ZYr(y4_>@^eaaDl4=<<>c^(Ya9B5O~TN zfii82(K*-N2n);b9V`b_vP+r8i1W9sM)U=nqAdMvb$vkQ!Ci5}JrQJWLD0m5Wd9>M zC}|{AJ-A`ZYRnYl!#w#AT{LU6@0_vg)JIjyKMHKEFs;1u<7TE@z~nBgQcwkNM@-M6 zw+=?@!aM5W#bJGMs9l0x{+Y7lmY*qJZ7fA5=qswsS?j{J8sIYVZ)eo-#+|WlLxyLy z0*VMk^MHHRUiA*^Db46#|C~rSS)x)J{ z;`^Y%waO+C+Z`SXXrT=y?%fpD{F@8vi399b+%ipydn_-_O_AUrJiFK5BY8{zO?kXQ z%6wfdyi9*!91DJ$taRPCL5`hovHvu0LypzdmhF3na7w;86 z=*FlE=WTrXBn>|CajIJa#>v@;v5tbY|k=s*J;bS5aEh!;HZgNuJYLj{gt5QmckqW&IzwfVw+f0xt z9KZFLI~XxU6`?A7iDiG&s|Dxyu1mPXun;MMJh}kAYZsE=ZAE%lL)nV(4%#_ehYf_a zh~%mi5~2d}=>R;96kedVcU0FpVlfs?ud~z7L$06}d-89yYDkN`iuqF${?DQ)XXv<^ z@}yDRjCYh-AV(Gd(U%m6oWdIVH5PC8KH1oB{J(|on$;dg>RL}aP7~C3CI{JiWY%4Z z?ZxQjxG1!K>6pb`F1=C*Umt?K7obEt0OB>z{849fe=MO`MZn5< zuym=K--4!CCF;Rp=fhI@o8SbiiXxQS9k(CnO14&8WWWgUS?!2`?q~pvY}fXYAT5&G zUD>hbug78GwqbN83G`-5qO4uUaka%@maXwm>O7qriFiLpU;cnC{x)-Jd>8V{3zJOT z;?HZ3=VU*2nnWudKvNv4K1r^`#DeA`hJyrgvU7UafBNPUH||0qMj0tLH!XiP&_OR; zM57v^)m>!$n!%;H7E`PgXm@dg6sa6X7$v31U1w}2Zms$kX1O-<#3b+w$a&3@XEXD) zvxx?-RYSWaIAdYF7gYD%3#Cy0ugbH$m~b|Qt8?g?Q}4+af~Xu6CB|%|Bpg|{QvMtc zpHb`Gon9`oHpkt)+w>^IZp&cm8r*oJM?@=LU2Xltpy#Hr@R3w8t$^y!a>P>;~*#l|(qrT3@TSeCfXi`oCMnu&@ERpRQgTcy^ir#IaMs8{w^%xfrP9UDJ0Nk~L`zV_;J2ukhV| zH{YmNj*hBVyD{5nqt*cY=?g`+FS3<^w~QzR#1IB56EL^L+)s9hri#GL6Ff;sG8c@F z_~&u>vtOzlYmb0|y{=Bvzzyu*P?1PZiA-IUZ=uS!#Z9Xkc#Zw1eJ-PG0SdCG4!4S|Uj+IIanOqTQSz7bg@xQ4FfCY$Xdxx@v5YS<)DB{z z#p;(pxp|16I2|_pV}G&#b6DO_80yg7fOpDoG9N6TSkjL%<;p?++|{mPd9AHpD{TW( z6wuW(o!m3Sokoumc6gqY#CaX@&Mwk^buMzid^UD}_AY*ZkQ3AS)7_Sc5NYyf{(qq} z?WA>3?`9i+S)xDPoqgJxP%~LP&4D`CWRC5S}6xfk)qOuuvi(;1W`Dx|$~7W1$kTsBGd1*I;29Q&g2X z6EXZ7>oWLKukA?ggRci@CbyI1M_^!8kg$6uih@z(zZu6dFFF0IlB$Q;(5s|#d9n^wFv&W^mjV`v=?@bZeNj6_ll(w-VBx5b?i z!Xvk4X_L+SjEgx6leO~gz+JWMRppOMEL7r9g&5S^GeIW^*T~I9mn=#el(>A_#_8If 
zr^N02(Fq7^lbE;`$kByLrq;WyMP)qna+oOT67|HjpoloVV>Xf1Xv*i=Y5C@^sm*;Studw>zofMY6lM@AdH)cQLu~V55cVkj7xapKQ0P&5Hw9Kh$3iQhr_%dh`XA3_}V) zP8hx9V*N<#(jWvwMv-N2^ZoTsJnEY3yXUhsE{%y_u*SkB z_rg29tPA$O8|b-d|0Yr6Cy|isGdZy*SotTebITMYfF>#fGx`5MP{<&D7C1NKJl|nI z<1`ID<{-P)rgnyrW)PcG!>Nuh%kYw$D=7j5=Ul;jbvQ^s!tLQ=CstMGY+@M`O~jOZ z*SMD?*jJkg_Y6FL#p63G+4xssOCy48`B5}~flVNcGxW4O(=A?HA!r?Ic5dM}yloEe z;z~?U*gcF>hu}4d5lqcYut6j#H}SJd`AIW&i8BOGM4L6|Tlp0HPE&oY#XZY4uzJ%C zjrrkRsFvQn>{v59rDvZ+X41X#an~pt1RV__c1pzSh?d0YA{D zPofI7Vp$sTU9md~G1&4aQMtF@*&L4AM=JPIt!C&p4(-~9s`#5_K%-0pNOg^y(07vJ z)(!wk0tUGQ%!meIf*YvZS5jZ4EnEZ7*n)}qe;i$VAd~z5uWy}n5vPlE!A_~1noDv? zF5BrirHCELC710;kt8k2b$coyx2*`}wn{9yjF`Kv+*XOjFt-fDnz?M)%r4LQJ?HnI z|9E_!&-;13U+>rJ^_K0IrBA@-4~kqSere7{XoHdluR0A$CFHBeWzucodm z|Fv{=Uk9$+2EH=PjH??tHbr)TGw*)=I+%O(xN&-`Ic?y;ZfI*>Vsh{NxeKQP!!U8I zti4iz>~l`c$?E_?P))FlOZl%s~okFfV@-5&FP<87>5 zh#Fr%U+^x-{1Q29$6@6Q*b1RJl+7j}Eho&JdFB^Jw(t(~C=}H4=lA1In>sB_U`b`q zpb_`UIovk)uh+}>SR~g}dYl_+J3Bkc)k}2r`6-T!NMNibXbBf~9q;D*Vuvy}Xo%_a z-p6I-H8s#`oEBivLB<}apu4s#&`Q3~KqEiW8Yp&OwxMG{s>gNXXUsJjK$^Y`dnNmK zT`IoN@mORT23@v-RLP4xD<`qVQuVTJkxnts#6Q+Htbu!F10gvJ90=y3-z+E8P6eU- zx-{u)QNht#D_G|Om;C*(KNaglCnHXA-CK=zcykzUhxni&Yx`Is^2Z0(lIgrX##eIp zhoxU{1$fYFcu2}=H;?lE5>6hG=6Kdn8#d&0?kHZjE#MtImL#d;9j&ubHS3zs{nN;F z)H@mPWToC0)?ukeZJa!SsR(L-pmY{gW;*c&>7V#tRweVSj$->0U3|7H5~pA33&s15 zw%6<13+_#EhX+|PAgne&bbZ!46ks!eak&#x=ys{v4;~$IqztwE$)50=o zNG8Q2He7R5LWK(nbp&XzA70JWMf zy#ff60}bBp)ZE{AP0ai#I-8)yFHHvW9sisJm|)7`jG zw1%b-L=H4spp(Ea68f@y#}gp5aw4KQsTi#6{R9dAQw~9D>QMXBK%_cfLCdEXYpBv= zK;*X`Eqap-Zwo_{*FaY*F$^^uG#UlV3qAKX@}Ls%*{qubO^g`JN|cJQxZpX6N#w18 z$O%U>*<}TM^ZZTW^hoh*Q(Glo?Mw&KIutY$C&Yy%;*Uq1(>^58%$ZbqaBuvck1dj) z|C{g$6gz3Ag(azl64q&-?g*NCC%q+8XY1B3Q~ac;5_2M@wPX-&MrbUA&HJ3>>TM*h z#gEH)u>|9>CdA>Ejv6mds!{lXM-IClw+V_4!mSYp%kafXt(@&isoB#vFP#3<5cx_P z{-hX{c%p7gKeWJ$0sMb~T=jON&p~&|O;aP}JfI9 zxRw>Hu#@+!X98$ZRj~-!dOyk@@b}p=N#H1Bv`^eoi?9x;y`7;z z1>$wHjhU&J}0s*W8o zGxR|h_I^ReRjAUBY!jE1T$zBlzO{JT$V#QG8@RFfi}!3#pt(aa?iCkAn9dFRjZWor 
z68;G7atR^gwK7HXMooid3ZcP|V3DMop?zYDhLrPOaXZ@)y@WFi{AN?hSQncD83~j@ zJqi`kO#}DKET>BOi(uv{t#Bs97AH7~rFf%{7j;xr#LRbB`;Wx+<8Dd|pG-GK8!TiQ zPeR}4_G-ssjGE$b^osW>(i&jFDnWfb!nkl?iGP-$qr<#9PlFUn*G)9 zb3G~9DsgnK>7NF-|vigdZ+7BlnA2rZ~-zAKG5*p)( zESt!8$nF#z=-~sSYa=KnS7sMVnUsP{@tRzR!SB6F$l(I`pxz=3@!us# z+V_|p4S|h8r*sNnOPdu||Aam@ZjW()cQY%hu#KI?imdY1F)Y8(1qGgyrr?_#m{!W$ zDo%MOvZw$>knLZ+ib!ts;v5i9S64W2apz&uHP#AUn&t`7JwZ!WI&Y&?C+Y9&f?_|6 z#%>nY&2}4TWCCMwB{fs%5$E>2v>UtfZR};1Ok9f~I18&m}OIK>DsK zA%|$~I^G%V#yr!HXbpJh2|}1Dg1NM~xL|#EnIYTV@L^cnVLrQU^-6a>{gyVCV3|J! z^$xi7agJ2ar~{?l;2^?GZps?Zv-W;qZ8hPitS!bpjQlz~@hIhCiZLWddsaIV6D?n_ z_*JSQZ1fhnJ$pTyO-V8dJW^gZ>F7QmeVVrw;mUmVd7c1gmN4+UuGJTOa!5Q;U%O+b zF4s^~J#In)5p(ceiGW*+GQ=z*EH2I6amXQf*A1z{4aoH~e&KEIl{N9snUeJ1Hd+7V z*=ES>1b5I(YMW?AlcyM|O&2Cdg1pW*!MD$?hHC=;a+^ZR@S^%FYxDK1ygoufhY?p= z3>Yb#48?hSb^^U;IWkxbmx9lo$`67%C&_u~*&=C`k4Ukp1AgIFj4pX}*xRot%yxMP zLwmi3h%*0j+=}_Xm@_#S8!W)@ECegZH-$$h|Dp05U@u2&ir_~`yUVw>UK>37vXttd zY|>T;nYBy;>5}kxti9Dyk$J5x*CJNsgPDqX_d(#hOAEA&%~Qq(HFPV48g!zWw)@xg zHcU{@T%!CE>gL(+iw|?3rL5|`TSTv7eN~;o#nW?~LTa%4E82m91zrW%PBM#Hnu5 zh}OG>fLi|R@iLC`ue!J8Q}XJxo@(NU&cMFT4;)D@9)1?$DM}&NPYJ|R)Us#kl5L|) zMhB!UTOsZTwXAQKchg62(>+t7bitrRyi&Rim55fSrRj6_S}!)S_q_cTr)2V_>(R$3 zg4KrZlE{*q_zU!A2n|l3Ft{6GF>7O)dv9Fiz4CO}hd6d-y$+esO5UrDe7eG%twZeY zEdJ+CWm}W`Kp!xSwTQ7tFJGumZ=gzM=kmk|l^yLvZP*Mv*?k+Dc+n6paQ}B*eW}}? z;eXeSl4o)MWY5GIFotyd=Xg&c#J4Rv-$c5BbPMz;@uwN5eeKm-zUsG4vS;d*A^Gj_TaZk%&HruID`}E&YJY<{IU1L7_zV4%CXuOY2Y+=_l zT-Dh}&phHeR#Ulfop$j0y#R11Y&Zxa_nWyHfSDFP$UXd|7!p!)sG6i^4fV4!2O2Vyr`K4?q1KxEWwCvsqdF1RVv27XW(>wI zI?W};h&Zskh${pXvGcl9zIj@~e0pF}cA!ld>f7ql=)dcZb#dJyZ^K9A>Uv=3(tT5M zk`8@Y1Q~5GlSm8&eb(3er)Ai8WzPvmh9XL7LwgXeuv9-iKsz=Z=ovSVf3LH?=tG_P z*Y(=O*N0^!C+@HnCUt+PmiJFtiUlMFRfC#CeJD+oTo&#;0scmI%JW8O?g|JH`DW$b zn+YDek?si(vySn{eX4uke~Yceh_H#@QlU zzL#OV4K)SOo$h&dF$SBv+JRJ~4zHoL`YH@U^v|r+Ht^Q4U^Kzx?e~9Pn)rtA{>UTO zBNHb0;x`sw!>Knta;;{IvK`o{9+R$)JGAQwt3Q?QFgx%y7Q+_*#6pNxb3%C>cIpR! 
z1eb!(Y?=XR8U5pji6KmseDYI>K*6ly_>?GO1GQ3|()cvWgtDLp#Akc{Z zWwHwmhr6it!b$o8Z&)I*i5`j70c>t+55r3UchB;LpGls1cgyMyl; zmpcTNloB@guU`O=WLwNudw_LsfT3s(7Id3CQaBe)n`J0bqjYji}tAN(o|K}kG>f{Ex)o)+h^ID=l&`~7}Q;|L{Vp5w&x zKd>!}OSXT^_r2lv`bl_=U_og%_fOE+(e03EY*~0(CboIV!?|^z$KRB^QPXc_o=?s&W;bo&Nw7g0chHQLF;Ym9*otGVdt0TrH9A z{XXV7LA#=7Nq@=Bg4HRPfA>9h#Qj>D;}!mra{IjByYjEx%gHWHIrD$Fs)q031Tt!iaBEtG{5JOK_xz_{ zz!Kz&7z7xi?g*vV!H?s2vse6uWo{QPn6H)g_K*D-$Qe0{C%?S(w#06#R`Rof;{NSdn*Zp&WEV4sdmRJd#Cx`x$>uCq8 zZ|pWC(xo7h>y6Lyof^Gr2$Ta%E#Y~+-saiK-UE3$$7o-mzeI&yr-*{i9R5w0`!}Y_ zwyXZ*-gzSWcW)a4WB`Oh+&NZE|d`FP|~nlmLHsR^M}GOOAH5 znjJ-_sO)15Lw*B;_JlGPh^8_e$s%w(m*vS@-jKRekVBDjHz+EW zL|+!)k!E{*HSNPxR7ZyJdyt<(a4Ac&j1ytdxOJGS1So={Zc;dM#5enQXi|9&67fS? ze%3M{^9LV&sH*?B?1h;K-^}rVxsTVBP0Jr;_vbkp<8HnD9$puW#sd++|Lg#W*t89f zv{-J+WDKhekp*Jzk{&@l1kgL#8R+ledR&dzdbHM25VSyfANa#8pb?Sg3TY7*fCJD) zh`0G5VLOYoI)aMJ=3dqA-NYd%=^w>1_b(0KDmX!!Zq7i_(8#Q1f&z!%xJ!z!Q&@#) zDK2N75yqHf?ZB7VP)SI&ALwnf$zsE-lge#l%&GQwcE^@WL;RO0%ORm8vbX~);AhLm z)F%{S`sVO`SB~!v5qoQ0rD%u)gA#i)XvQQYKU5+TH zIeC5&p7J>f$G@igRtG*W;@@?8W~+KF%Krr$eX`t>g;gDJIe?irs>;247YDqQt#W@_ zB=44n9HD%1#)X)*FE#xcReHSj)WX=(g&R4wt3El_k|$N-6=z;`$_4Z) zAn-=GHoe+-gd>{AWbQ9dH%yI@oHE9P+nR9aZBRwH)7%kqPXxA2VEqLoKY5>jJ;O}P z-m7z)(W*CUG+#qZ+dqO- zS@q~q@@2^%qAA2a5lLd!&rT1VBuTTpKZS<%DmEyeWkq3}Fpi^8tCj z%DUJ=^KrFTh+3)0xuLNtb*lQqO?E$({L75S$&_$CMLh%OEu{Bw2+ zVfSX4C`F!&O>sYqrQ6r1rc`XZQK1fa6YBRKzQ-*CUMOsHuU4hBRKo-3um%&)ebRl> zY8KIgqJ*D&6=*#TSKkt^RoKGdEwlL$cvy2SKYE|hiOqkk`Dr&q@#vHge)}CAQxPTJ zdi_KvWmiP2cV&+${ODS_q1uN00nZ3ngovLXMWAvqFyu{U0<+OVtkTB4ko9LxxPa zC(q4^u6mYwo(CS$wYhuJ?sYqUXUq-hGWXwg1ez{5D)N5=O>qWRRpchEl0riATTh)~ ze748bj=tPn!i6tpa^Nwzkq4zuUdazob+c`Cf1L=C06K%;ZsNrszL_5yzTk=UYGXkA zkO*);Mz=+>O1d+8wk)NzdGwSk&M2{A^;UUK&svy`orIt)P}qv(?Y)~-8v-6F5D=Uc zCjT5n8lK!lezzQ3_G>6BMfDCMM7{Sh+fko(k^OXJSSj5iV6h%b0|ZwxXZHQ2+hy+Y zGkBiPox7%`6}FhKh;olUz$^@GJ;{G+i%0g-m|-dTNF+!Ll9z1Dy559*K7LX^XI+Jd z{#Flf^F3X6%b_?_H8+}`W>Qt#>c5gOqjCy}w7D&xH+siQt zu%j+P1kSa?o+J6FkXH*)xpKdBpMeC;T=1gHpTUTJFe79%S0rHdX|XlOnd;K4j^Wh& 
z)K~m0b~?ug_rjp;ods7rvEv*-Hj|slPvYB$k&ntt6~Ut8zf-7lz#HjZY#!p9U+g== zrU-u8rgNdabR*HWl#@=XA1ARdyAl0k`v-67Xe!CO5`!Fu%mPaK3xj-o&@9w1mdp{R zr}=G;k-(=h|AHKjZ_~^_JituFA7ezSKR(MIzNf7UCFG7pr07I$0RM@}U$g$7ohSIf zTHBzAR5Z-b4l66nC`_dc%^S=Mx0BC2*tAY-{n{!;qs_$Nefu9I`8 z%@x?T*`1&6J&P2bRaco;$+rqqI^k3_6sI~AxEyJmXQrNsvO_J7R+6z8*YY;)KW{w5 zoLta%UiQ8amua*fVv+w?Z7Vfc!5HM=Q-884eNnrgoqr1HSuK}Itzs?-Y>skzY5K4x zfQ$!UXhSpT)0AC3RA+-GFO@tZn`86Q23bt`n`H()QYi?a3)YBV>4)0RT`G3FgXu-) zR*hyRRrd15wsZAd-hju%R_d*vJzbBb$Iue7!_zLqEPOz`6ZC z6~FW9J~5YOG6=@3a5V;oea@@Ah)M6*jOMs31hqcDMrJA_VVVAWPa6@^`ft=Y{#XmM zO12_d8c%~8D9Ef=dgSSmzinqyV9NrfWvF*1DLQ^9>4zU1hL%b;Xsf9F<|gd-+kEu^ zc0L!(Z>gVkDRv^)#bmeX=Qk)f zYWxZo`W*c|cmebI^x6aKu+OLGB5WF=$?`g7L&(4DDk1)vIjrgYT*-V)J6@whT2lSQ zCEI?MNoJbof-G@yubR^^KL{UVbHhP;)&OlA&cN=$Vlc?L zW2~@Ux11k!O^j%1?mkc`Cj34TV+xJQB)Dh3ISce^;p!05CW}+G_~q_gt9DzY_Ki#T z3-YfA?MS>i+Ekx;ps{(x!7ua{`r}t*#l!qU=HCPTcsxWo*&`kG|QMIFr zBKikVh)bUj=jkn-maQ=Dq34gtBn?z)UFO%kqe!mxMo(mvO_%kHhxkEN%KM)S=I)Lf z?E81!0U1cUf2zqLa(hAoHFsk%HirOlGPXm-*ZSC#HQGjlCT*v>26ikfk&&|VjG~j=~T&hFdno|3iFO{h;S*=i( z>^0M;=Go001$rYKD5&aCA6 z=^cL>n&;pKwWR@7ba5ZQE&T#mt-ge0t_jKl|G~aAUV(5js`WI|KB?^rX)F;L5eYV* zRxIa#)N=iD#O5l}Ta6YwQgg@TyNum4ecH#;n3rcHe~$Q*dgX%LLeH*|n)Ov?Vvjtk zOM-H?^Es$npc!TNptlxQ%9U|hOY`XoG z&xpEtK^LwF2G4{Kw{DL^gr02k6+&%9UQPAE~xqox?xU$LkZT81sUs^A#TwD7aBL1F|D0xpZ)l^F83HE)4s%k+?wo^ip$)Tm-F)k=7bgPR-ZTCj*C zE^T&75Ga4qf2RKbyhW?x<`fpcXoF;8*&||c$+I@3QClb^=z$3-vJ)9-pI{Cp#O5i# zXJ%EeKKJR`VS>^^CWcm9a``KaR^|>M_y%T(ecCcJAIn@fxXucHdJdOxW|@&SH^kFp zuaqa8?>o{vrtS6(b_!xzi8=T`>MFQ;4_PYDQpR}|6kV%Nj8mWF`;N@NqutV{-fshp z(%PC~4#Se~a0=re**egjUG7`qHyM+)@Ak6YK5Ul+(tA%t@941lhv+A^5j50RRA+LP`uF&(*ff;I z;+hUsD$z5QWghH_l5gZDw%V{z!8$~|EVy2&>$1&d$9~t>?jt=EP;i@BJS9V9X!o>_ z(k|kpyxNs^Jk;rbG8>nk5khBKF<^#D4LG}82KGc22`q1qY)B_cpKHm*do6UD+$S^g z%JLx3$ibfZf9kqE%2p)0Bl$nNaMMtyv^{6L<=!TWbhT!~D1bO^vtAhl(SiGq0i`h} z?g!iaM3Jsq{TOA=_SiQ0d#IE|w@hX#_mt`6U!>S-WVK{1R!X7B%|M{p7henMeWGLo zyX6JoR;zJh+%U3jZ_{;QI76nMyz=8dM}CecKU{1c74NipI9e>8-K-rX%wg<6=UwuX 
zpvfX$v0!DDN7e$!}y-qVWvK*IbnGU5JRnB*>~{Ek9+A zk&TN~rmp(ALi;dZJDSvLpkt*BKNlY&J|s{c770jBV=s7Q zY9zfqM``=F^x@?B09wvvIie`IERrcx!8n0=^y9k%LXu@yBN5r-sTRp?hSF z{|B2N+7VF-+t?Wr&i!GDbGmJ4NZpoGl3dCy!a_CBVw~Ig_0HZpUc|S~ovc?M`r}&2 z{tF`1uB32?i@TO~flABMXL{!A4o(A52VYU}VA((6kdyM9r7)7|yMlcEF8=WBmeb-5 zPmx*EKCtA&RSj$-L@TLs1X|53AVNZ?=!v6o6Am6nMX2Nu{Rz1ZQ{hmyU>M1S6zgUA zb zv(=+7ax0W4tR&^g^UvG$E%VBzP7MGeC`%}>GNB90HH zm<=2lTfGja#@!8!{&G8KVAab!FqYwa^XmKWtyi-a5SUdnD43(Dqo6b*hIOl`!H5bdCPVljIDSG#=>w@^XkG!G8pTLpTf{1l89KPU zc_~6bh!hY}-Y}=8KG2A;_;=s!*Y!E(FQJtd9gYuK%lWM`9lQGCYB_ytdvsvY7t`87 z=v69tg6!EM(efj#L0Bn@+7@1;bFu2iiLSRJ;y%cbKXovYOq6X5f+Z~Ts4>$qm+x47 zb&;6AW6O~P!SM|$OiNGW%c5#h%d@7wq zXbJ-o70?6|&7Md6-lsSXZ4&UR5RT<_hdE;0drdFiP>4u79)1=KHy>JZ(Wprc2sO?Q zwj4R-u3hz+iI0GI^*DT5#3^vAdzg?SiIQf>trccccmdy2y&blF7O}sdn(F66gC4eG z;BT_z1*`hm1ZUpoRx$F_lk<8{BLaS`#m5K}y7w{CSJD5fr6u%3FZDS%r`7eegMs1F(l>{oA`S+M5uyM~CM;6f zKcnhD-YtFvU1emO)yMj#PwxFV{qQ3$Kvj7#Fo{vVo;6m0B;u!QwBN=n2TLzwr5CPyc>Sl=t2`CcNg3FQnq{KGN{T4q zK_-pbv8fil1>4-qlwlAXSGhzZ2F@HFviSpvnL&2rw%<@b3cLo54MhAA8=#{`;XV%2 zP$2uZ=U4h$lh(rMwg0X|RW!kfS~8-yYw+d zZfTkMB@Udh>j4E9NJtlzbIyfZdX=JB$(1)xBcZ#-1p&kKbA5~YhaX3>bx^ASQ+m0l zU9}cQL-NQ=0`W}ajp`fMviFu9Tu!*9O6vR#CRjw@H-Xs=F1fv>dhvrBFqOQ4PkpUl zDV0_)#Hkyu^vtjpshfjmriPhJ*%3%bqy>GH7|k&P(Y8N@bJ>?}fV!+(i{)L=VGfm_ z=GmCpoO0%}5HA-$n>CiS`7YmTvkD1?=45M&P@ZC^Ak?;!6>~|AIB8{KdKz}UUI4rW z2CbkeS?EZ1>(K&LLJ$nKE5+k%Vgy<2mU+a`E7W&?AT5Y?Ls^y{Tn!o}RnP^$?Dx1i z=xxOQ9r*Acjn^JjvJ*CCkP8?9>-I3yfr} z+1$+e^4*U^Vbh^ROcCY#F-D;6?J;^d$yW5uHPGHL)#u-JB^p1;ku<@7HH%Ljh(z}y z`|00)^n`3Lf2~z7z|M#WZ8%g284Z6m5N6(IW>rGlobQJvm^?azeED&&O%!{JD!p0B zskH^|)^x{e3zRd^ga@5xB6!9wyfb;@j9a`eh^`yr6cvMku(Rni5sA}bcL)2G%mJ`= zxd!^gWdph&D%fFGINi%VZyK@`{#0_Bwvn)KD{$~wO*uiM2CYS zLyscn|4jy3Av|MW$C+iyTV?Ktl2YD(wTup0T*W6(MWcs{4(0FBpaWvqDh9PmY7}@B zbuWGzJK7IwOG&K=8=-%pVa8<0rl2t$k$G(K$CsA(=@Y^3=LbwvU9=HCK~Ac;lNJm! 
zvC6z?5)}vAg$!aQV|IX>gNS9&3fscsKu9GXTo~j+2CfC=pX^=10qqEFunW0jAjs}0 zn-HE>xuN%pk}#QbW5&PZuGM|P=a6sF;*y%)CkwB$^6#izB>XWiT~U<8)6`I!4zT@$ z#1Pxn zOz8&E{InxiGnEt?C?Ch^KhC+dbUp#JDZ|;=Ey-XljZ8fR>18Qr+mEx@w?oP}AUt?b zvdG+^M=!quAglIsk{+{w zsTy{rGNHBpwhqDlK|ws!G7hXthL=Y!{5n$flsV;Ci{Lvn^v~~evE?RwzK)Yp15}^=r z;@d$>%ty%afl*NZTX?MMs3~LpyY7D_fB_LqsNCgzDw zEQ(5Kdr4Kzb93e3y#83i z3Om8tl>luzExPWRCBF{L(N{6@Y>3|b4{6r-EKm$77tLQ}--bW};ADdCBDd-5LN|nG zkMfSXgTpmqd+X@x{LCnum+`Qb#47L^GN|bp)b|2qw`&SD9)~)H>Hpo@2%-@MgU}?~ zQxR&GHBchgGBf9+J88$q+!_SeGL+iPSvBoe#s-wYLIyFkfZ|9?p6=(ifZDzdjHOu% zh%wwxhZ?YLL7ZQ$&!{I@>-mQ{o(C9R%{1iqCDi^7$_NQhOGv5}o@;F(?Xq^2Aoq=p z2FeT|xO}!Jc!Abth6iShit$?EI@GyL*fvGTxKW}>kj};&hsy5|bm9(K$p!WW2k$3CZ5oGM+=PPl$L+AI7T5O3erG%YCx!7heb8^_ImK zCQ)Lc{AD~9qD7;Y+kH_*;2!*!vE9NoqO+9XD{`xpl#cv_%oPOaCC*Y2u(IW~t=o%! z?%5ON|FUR=Kj790Ci{|ym1(`bEQ-@!9-Utu~8$$;DdX-RU`*rCZ3x8sCx;!I&z$a^~taZK%Y|CN>r z7g_UMb;M(lfz{Z&?!uUBY~~))5qMxUeU)VQehorD2b2yIltW9iM~GjScSD=G{06(_-dY*$K8>2+F4A+8rgQ0{FSEA`;e-pmt*%`e<7GjE6lGO9}ln&ING!dZq*Q;auwCQ zye@*bIdKpGEvUcEevnf|Do1pD~$}= z1N!mL7817hh8%d_q;tR|BH`9iO?cbH9cTtw<}y9qE-#M$G9>Whg=(>mlOJ>)^eop? 
z4nYD|sI+{T+z+_b=G?ET8RP4U)IHLe4pmLrf>mU|xDh)Sz%a&|ttQkTrxGBhVoOo6 zPMFVKs6V(qGOATB`*JRJ7v zCX?J9NiwI&skRtMx4oH9ih<;H3=Ji-%|rGQ|G!Mvb0rbqqC9fF=<=GKUk9CPBFN=O z0vF!a+)^+M3dM8oY(0?o5c;iEw~F&3#&c*=loXf-N{tj3d9|Mzoy`=#0`qp< zRisO+t8!J*u49Z_otAA)GMZ2TW%u%rRK~XmBQZ-NA__H4{8|_lZ^qV-ECNNYlA^ z>Qa9Zvf7ksLYAb)&qXaDmAC#A$eKhO$^K%UB}CtYu(SXV^OS-HMAHxN?zc=*`QQ@d z_MXUo4iQ-Y<#YUm!&#lPJD#^swAPtxzi{e9*p%&CR%e%4S*2t<5)NO z#lwyg0{s#;c12gL{XPR1ytgo})n#n()=5b7(iQKHM%`m(harct%OFy5{3@qwlBBSx zYF8c2iBQ&%rtTm!Y$Xn;wFyn-5@$kr(-VH9{;b9E8hVcMe1@vc&D`ad2|$>&T{iL8 z((9y)Z861k{wQSos|9XWZFAgmbO6$iYL_iQ3WM*Pkv!46sd=c8w$HY1!>&Q0rRt0| zH+;W@UP7OF!F^2L2e3#osD?fW2?WhW=%?0C%HxaoU~|SonMHpYXgsyZ&oO-Edi6u2NVFbY5;mJhZ8RqM8rNz|$a1fvAS!Y~t z?n6L}HU6Y7ikpsm_%~nr08CVe_mt?wYnpvK0R>zu%ODBlYjhykmj|^tH`^ezt_S(G zPy)C5mGb&G{2fDGQNBw>jOp%mb5W9YYR&Z&%kt2RqK<;V0JUeuU)lG64VbGE-kwud zvPM+C;}7t@ru`0WX0?QMis$>FGmf~oq;NvNztq;Bac>&zdg8b=axSgET3)pHKFGFX zQs3V_rOh`Wa{>>sgJ(Q2vLvu^ryIJqz_xVIF~DfSUl0r>S#Yg^bSZcLb3r5!VOM_A zQnrKC7Le2scJq6&6BRbu6TU`fi9APzvyfhiEWXM4L;=6X^uHT)48odajH3S%s*)&3 zk``ocp(Jn-}0vk}fZC z#q8?tc^jzP|0TES_MW^bOIJ6KjDidF#J9uuKQt_V2$*@fxTWu_@mPn0V0Q_nY~oV; z5kU61-OB1J$$=joL$j^vDN1rWA{epRG7cmF9nGr~qpyZ$odZCd1saP(zfE2Bub+fT zkJxn>ZIzZdCx66LLjiz6qDP&r&qk#ownAc}wTb*3$jm=3^3U4`LSyN~fYevQ#dhK* z{@9^(*s`<2E}(kl$h!39k&jDjo~47ldGjWnGd$LJ=q2619J2~R~WxwM*B}Ts$OxzuJWTY`XBbG=rFTl%M{}PIZ_E1q%4E@ zj9OuqN0Z1d(QMvx1`eE>lc;>=sqE{piC8C8(OXi?(sY_t8%;XeN498H&DQ0)E+)25 zyrl`ya*PvWaZu~r0S!W7EJ;423$4qmps#ELJLB4Yu^7B2RHn1+*!h|b+3Y!d$TVLp zUSS9@5ixM~>!pK}A(kAmPHQcT-?2T$_U&2K6Rk6#F0xaBctp#ThQB`>Xm}mE8PEiz zejc(Q3pmI#n6J6GxOm^#GI{O1;hfl=O@0sDU-VnOtt}haM6SQA^p;|A8eL5WyKA>= zah5qLcYrU<7@b%91=eAO9g<2kI{mr7JKV-6>#y`NFg&_&$9?!krb*xL7cn{#OmE4nPd#`6bovL}CFPwM^`2p|Z0dsZ(` zhn*kabLYz17)lDD3LOI`;1<p7DBfeQgREhhHs^lxYD4JA6|-_43=`)l1<-y{{L)36}VDA-}e#bv7c z&TYo0(gP)bhq8sA81+2pMz%p;vR!*`DL56wUL@$)sL&|ICTc~eIWQ~xTdn6?c9ydv z?nFZ$;#+XNHr;7(orq9S{yZJmd_BpiCh~h4zN_01vg(L(d3;QR@16b=7S>1i->0e? 
z8T4m+ExF#1}ZvcZv;?-VBASJFQL9E=cvpQS-lx7k7I6=_|LPNN&go zVR#jnk&)Mnm_M#`(k8pRe?qZj4WW`CFX9@&*gJ>M6>~M#PQk9eoALN}UGccU5;=6C z*s#vq>6pI5hu|M?GirL!d|g2uX2bE*!QR)9sTinY-J!o;9Jrr)@8F(2PnG3kBlAZ& zxj?`&c$zEzNaKyB2WPkCsX4iEg$`tI!1 z$OtZb1~27__+rKJ1O{R zEehXr&%H9$_vfqc9L;u?wPy9_AEC#?cZu((dxlo`-r`QGT35cd!g_=!7yZ~AX7i`+#?bs<=%{%$Wi*fS+g17- zy>^YtyCqvL{j8Xun49h@37VCWah^`SH)q{HAZ=P%8J3D=0ZS++4$&cqrUSy-h3TL2 zjYW8RVBt;o{T?-aWluU@W8VdCMN&Vq643bDcd_4PF`@D^_yO>ksD@bf6Wo{t+kC@) zyClfx(wG1|z8#9JdcnMy&M1ncB$&Z3%pLv_eoEnhQbftQ9Njtz_vzWthK}mqk@nvo zx9acV_%|MmsAgzyo)-uN9Z1ay&L#_&^=)Q#iO!K_o@>u5BQ8QP8~pZltrBx3{=oLQ z0gHr$f7jio!3Fk~cPA)*oZ{rNOQw|w_Uc%$j32%VJma3K^(2i@)LtFU^jefk!S9pH zgj^Y~1}L|H+OYPS1Cdvk{OcG)4a)R=TJmx3Pwa5BOWT9De~fRFAPf%;XBT`Hh)pxgf_TiC=_Oho}Zp=#VH9Z zl62#R*YZs)`|pkS=b`{8(a&!@WG<9TPwXt3_3X|GER^1l_EKL-wP#oG9`-3WMatNN zuqxslYMk@fO@@c$0Jp${Qr_Zwv)53E;NF^$0$dAZz(!Qa3e*d=D7EeQ_!~W*XG5kv z&#D>&RbccEX*653m<*ivHs4vUXD}Ww>VJ0?YvG<_3YO--2oPJD&PSUie`vkx1D3pT zl=TVC4x$8RG#|I-U}cnK2@?lfuTU3;J0z*A@%KR3=r5o!=oIvW=mb*fJ1=9`cnzLK z`WZ$otjbcb47T>HRegXUoNyN?U)PP?N`3poZsXmrA0m}Rh~DDos?E?-mtzDLlUpXM zyt$g5KB?UmA%> zcCP#t$}YI4GD7jj+b*|9klZs5d4r!7yc6U09*aRnH=C>YwfQ7=dBU)Z;?EDgES=lP zVe7k_`OI3f7SU6aTa*_7+u8mrFN3kfUBe?tnqxtpsjEAIV(v84#zrj?nHqY@Au`O` zt=s?0C&_qpfM}7=_nNpHF-b#m0gTC`gvV<~50T{2j%C*=S=VKA_(ac`TNMonUDfrE z2_~_hcrqNK8QvX&ykVsA1Kp*TK(?L!gxuwQ$C77#*D9RXw*>zbbGk62F3EZ?4XsJm z$(0drH^J;@yHYZs5l9rf} z`><8aLM(>4iD8(TF?M5i^FF^<-^cG?9&Yx2zh2L~>v~>SfMnA#xGZcc*F@?v=q

  • 4NW`aSR zx^*W%Q)NcbXIgZ|_-xTGjN0hnriG-d$wkN$O8dS8{l)Iq>&z&q*+vz`m$NjFS|l8v zs@jPR@RrJlOjU1&k@CCpLFy<4L;Qg9hwbUQ)irSI>yMZJAxR2aj@<0?dKOwkvpjO% zJq#Tbwv-r=@*IhD+}zPzHooH-O1j0Gll4qrtPdC{pxpid+WZ?N6WBzz5pnsY)%Ej_ z_lxKbDj8qUPnc3%uB=tr>SJaAemQB~X*di~S|46l-RU=C81^VmsHr2|%;{6hU{$PO zntx6zdu=H#c)J=N-REFyiC%saA`eI_Idtx0FKm^wYy$uhFT#!m%x2+u*%JmSED@6< zjkhIAn=A9pmg(@6IB#gC<-4iNK#-rIJ|Dm8mNPZ3;ntA=dcBC;5^_Lcl4K#C%yFMo zzpMU`8pi$yqs#nT^=XBXCJB4J5oSUtS)h*X%WC?xlX2F-$r+mP-@4J`Va6{VW^LNv zUx^x!%*YkE$z@{N7S{VK20Mh62Ce}s3!%c?`K~^S5BmXz_%XtVek5&wT0x#MlFnk4PVXy!HGAh z`Mp<rJ-pUcBCUJ3EQ1WEA9Y;7)nZtl zYm$)XWAWSE{7gFL&bo5{f%k3Lsorp$35FP6`fac0x6r~fe*KLBqcQ+ApJjyo;2D>n zniuo;h95s^{VJclc{Fi@djyq)9QbX^sVF6-5it|8t~GJ=Yu^6eTVGA!S!!#R!e&9n z#}-qCi78T{fv~=u72xuFxzn}`ptTw3?Y!pd;8DYNMYx!E?Q(xigNC8V%6Ra`td(KK zRrx;L)Pn$^2&3vlS2RZ!;u+d~K4Ti>W4d;nc?6xe&N}&OUJqUI$nRP5vtJoi!8tJ*ZRv< zEq5fj;rmr5%Q&wgO4qZDN`%{t;-;M@;%vCn#nioXj$dJPJZR4UG*Z9tPITjY*?TU| zm(Yz9ElUS(ih0b^Q#!?`Zm)=M#+W^Q7hiFeJ!HTdn~`siv_BWT^)K7+I`=+@sUqx> z6k{^%iZ`Pzzi=pS;>NohO09iYLde!1xRqD~qd7n9g?yK?BP|mF29yzf6+-3b8XAA; z_|hvU7d;W+jFR$FVYsqrIjghj!)m<=LqUgl)st5;6xsaInhf2sAX@pK-3 zBDZA5jz9s&*1$c?-Ws)03Bu)~vFHf)9Z+smC9Y~QSq`E+sJrO5ZAz7C*)miEIZeZ_HGb{74(CfDW|V;T z@<@w2c7w}P!+MUw5r6l}hD7Oa)IBSJtahPesz&(^Z`?=j(I3;g+t!5k*_JA+j+j{6 zo9>xRg8MBOD{yIr$F=h+aJxJ;1h2z#&|whpJQ8mtD`|eUIgGz#0aIg#T9QKSkvw&Z zgOfz5U`^D^kJzjRrimb+h9Y<`Pi6}vrzC>9p1JC{jjLk5hg`yC>x-EE)H9SFN;ou8 zljcvY>39}l2OcVYu6STgnZU=($UyyIt1K&L6SAQ!gRccS~O$puTxIvb0ELwtPug5FRobDaS7SC&9StNdlu1` zHUcK-TQZ@`u(UGe30Y{_^*lmQwc>=iuOGG>fbqz}kQMnLLQCcxxp8FO_T_J(Bp|iP zHJ}utZ8JA{a{lcV#%F{W?GUVJj}Kk0BZWuuD@P-iUs$j7lRUNv%D)N2037fKgst+- zRKknar!JZYfc^~w3Dr9&!)SP&L8!-~`p@J_m8!Kmi;e6Svo8prvFbX5^`dn_pOVd2 zoibZMJGCanwZk^_XtxZVyZkvoVKs_^+|Z}6_MVH$4%0u_#t1EfO9c6@1bI5^qw+|ynw&6KhU$GCCS-YW{3Ytz+ zS$xDnO5YnVKnnLsoYx1&esL0T1 z?De#1NTu#ej$eXheg2pu2emVO3$z(I3G#)Heou}PaibkL5)%zn{OZ0%M8h&RhWW~F zWw{sesZw+F;MpR|@>w9KZL9T3Wi)}&?R-9lbd)My7vG&xy_n?kKN$=%@fYLmjjxLj 
z1ztriNY9;Vmb~-xc-yxikFi%8+n+jGdhk*?m+Y7{jcgN`w&B%YUkhn$H#;yn%I@v)2uW`g7?+W|3ir zZ*It2F1&`;Hm5>b1Wd+UqWS~--LH6;gx3?1>9r2tx~mc4t}NwQTP_hyS;`;EN@A`j zBZ^0OcWFfQQ(yV51a6ak^t_h;VA7w$+}7!&+aWJP8+HIZ>XbZ+9x382uyx|CyqRBR zJ~X`;$-^1M>L&a=HU!#7KrJ5cFq$vxc!Fxw_hm2Ll;y5RGIPpDVr3Cl3z;1Ef$S>_wshH*@u?Knq%a-DFb1&gU{5Oq zFY5_ABjnNywr1(pjsH0P6?pcM$y3X+Nrh){s;iwlll)^r=r_avpgvf` zOBNEA{p%wE%{DdO`WCCiZIYv;c(6w91;>2z?dBzszvs>TPxCng5fB zLVEF_4Fja){gy?nqPT>}7@$UiALjBOCuY-mUT}38kU|l!zj(LTu|;w4zYS*;reoF| zVgr?vC5h_zZ-Z_yrSHI(+HJz=Z|p%+!+gB$Iq2}d50rq0#s>C7l)X^ea z{On#A$Qw&oV!cyZujmgQyETHVY70e`aRfikTn78*i9x@{^~7Je1#F_$3ss3)v+L*P zwtNW!gl)d3PQX7UPXn$`mERDM$YBn8p5E+M)+6iW3kT`Yx8TWIC)`+fiZ<6h!M+AB z&#YpO6XK3;x@fO)Sx}h$qfv1CZROb{MOFkR#fg zn-=${?lk$Xv8j(Fg3>THDjR(R+gm5mhWDP)43RePT?31bL}&*Jhc6V!n?E+&IP=Hl z{g2PQDlycVX{zW2nbt!~?_w)iXol<;DZB9-zEmjBSV_a7XNK~VO^piYe{8dwlyp>gfpf@=wNuRHv^qLu+M&&~Wd1J=CCqTWl%(SL6s|5W`V>n>=(KN7Ufch3Zh9 z&ukG_r1nL@t;2+3`oQ9q_QyJI2DEXsd!Lh)9{HbVXQ;LNGeX+--8$u!{apE3-7N)H z=5U4&DmX^z5fm(WZ809MGFphYYxnG8!YoBaH8r&K9fh7WGDbFc`O~JT>+Grk}d-GX}$aoWF`{t5T_6 z1G%usG{iQG4Dyyg1BF;)9B`-;vHw~Y(np_pph8#5Z!w?ShkgWh)WRoU>5>;-c7cIw2<#`5T- zC4%GU1)7tg>gXCz9<4H$4aCS3WEV;gcDwT_rRg<4jNl#_`lG=j{!olp>A{XOWkyB6 zw_hT{%JR|I@tbWvx9St6G}munUop=@DfvG&|h z@xcZSs^2y^-SqR``wtKIPK>7V(TM=X-{jX=_j~7~VeV_?H!hlYfoBWB7GcLya@XaY zFKZ>f)pJkKP*E()NBL-BfY@yke;$YnOG}s-m@FS{A{-v?&IrY<1!NJPsZLpcBDW?R zmR{!N+b6bAF06SL@+4$+lkwLCR0>cm_LA;@tiWuvJQH~S^MF-6I$JV#T{6*qeh>xaEpNzt*rb`>Pqs*GYHPGs(@OL!d0Rh z<*==yU}#>615korWX7y5o)i_gZ#`S^L#x#ljy2>b&zf%91KSH|sSG^6Zmr2c2d{SM zeZWO3>Ru)$wqb4zq03QztPiD)K<4i3X|~Tg_Ka3}Ul#)L9;F1zLvJvi1ppSuf75xx z7hagSFhO_@#5=@?osTvg5?u&7f0PSR)&5M+0&Q!Wq|7K&QgX1%Ui9<97ysDqHZUFM z49#-@S7PtVj}%!|?oW%w=|fvKA+FNx2|}?xf#@v@{CF^^;Vzd=1c)z*@JSA<>ZlHV z86V|35c%Oil8ajSjqhl~yU&O>U>jxH>q1$rJ{zF}F*zd|Th9hxi* zN`nUO0@lS2jdb}Cd#RDkmxqKqsB#GrktVZdC&z$WAdf|{nQJU^^e^x)T<;6ixCQS~ zC&HVglOba5j?#$|>=E0nW~dn}H%@cWaBGop;LmI%mD!HzKII=~Phg77RJU$!P$o)c zvw7#mXV(Vk*s69NS|@Um{kHw&oig2fZVMT!!$!8O5~To{#vb 
za=H9AKMZNB8)6O1a!0-qrh{vGGBP)bJ7g1P(F z#>ljr!<$0nv-jwc-*y7Psz4wGdvW!Eko@6LC1%?K=J$bzU#;F@(v$zu_s-tR+c0OUk)C9nR5K}yYqw`Ugl)Y<2VcF*7MwNe0$hh3JMPg&!r#7q%frYC0Z==% zXRx-&e7Y_R7yVnl=tNZv!(E^yFdu8|aB2)1BX+Ut4DZ;VDe>tc^SAi=FL;l$q_Mex z>*AsaF0L>bui<53Z3Q5U0%Bn26w-k4iu(A5@?02^$8s8j0t~31S(b{6O83ZJN@uh| ze`IYaRSP?PBE()x(CYhLIl-SWQo?(0)041%z$gE2WdJ?!X1u=D0BR(7o5kmqLNecU zGr)5Uc5RL(L(FF_Cb}<|!+1SGV*8W!l?~qq@_XMD`%MFy&j0P1gMZ?+FId{}Oag95 zm1$iQI{k|+#xFj+^ve1bRAB}GL`l0vz(o_$j`<_2)kv!YIs|>_pGtkND)ApuAE~v& zs=6?aOaQ}LzFDoFnZdDkyF2`dZZaIIxQ17^EggUkv)q+uE5j*0Q~bZFt3NSC9ay-v zVFPRJx<{KTxH2^VqI6bH8mTD%M<%J?5gOI>WKi_TaJhS4P-MXGDe3h69`|&Z#8#w;wFp zvGgW+oB)p>Z%x!&Gxj)s=a7Te4PbDAbQn}u?R zM5G@w@9LGuPSxp?dLw76{{mgbPLFQo9wD=xpk3$FYUPCQHTKuQ)_w>r^UH9I1u@|1 zwHbXi@dxEPd8^WVg_sg^$YOb$EY9Q0@*c&OmRuZko2@xJE=!5&YAMc!GrT+A``e{Ll+)tz@1 z`WvYgno7weBebVB(3|uS^}CwLho`-$W>6FqC2%{9m4AioF-1-sq#JVha`wy_&720dL+dVS}>|7n({gZ%01W(M6lMuZb z5ITyv4%sKg&QW|aEv0o~8?!D)MO&wo=`x5_HlNgQGqfcZ|7H^x2;${e3$Q#ub+5!a z3maeAlZ_s(g)ib)+e}m-(`g!XTp;5<@6nX`VHbBSDG`$}bDuK~L&b0WzIHzj*_ZNl zHr1*Ack3m1-QR!yWb^fRfMQxp>72jDZqITle<+U)X6lF3qaa|giq<|O>RBv$QPF&I z|5~$^2<`knfa9*^ye)#N7&@%NjuynSUezQ2$q6hD239iZB(7HEr-7oQIeJG`8j?$n zkFr5Hc69pxve%@)wwCq4qU6}Qkac)u1RgZ$5=_zcYt-ncN(=!cd0P)nt^9bkhGe;% zD@UED@5o1ufhLN;mldwyI>hrKZkc7>gb|G5PxW~?mE)}wx0VQ(WT=h};{)~)UY&BV z;nbr~noT(%OhsRSCbw@xO7~2ZSRK#it{SB1t=09x@0{28RHH%r~qMD-4php{LIG~D^~|FvFMfGhz_`E9X|k=AcP;%e)B;)o86&a z9k7ECL^zS0IFAUiHr_X2$v76YW%b5Qw5S*L-o{3NKY8mYwxXb`<98Ik2+W0t=?$&1 zh?*8hfu1thgit1C>~4bm`1IbZogdeg=#s&!WT(Esi)-^Fj1qg`PosHM)7auYS5Je~zJ?~Wn1G+va_*z)~qwthhs;r&GsRp3)KcBSiw#s~L z#(gs{?(qU5%syJKN<@7C)$heAdgZXQm#;5>#NSz3 zGbHsK@rxjnpQ%V6B}WM%oe#Q%wLs97?*+=V;RWNx{aNyDQQzgmUax(#zd$;Rv-f#; zg$|v2AMpF;KYY0j_OZ)++TUYH`mH^y&pEGdT1z|CcVjo9mRT=7u}wPv66_X!#KNIM z%ro*Q(WbLpTmR9bA_G~5alV*8=0tW}J}U2fKerQE#sQjiV2_n5`Z@C21YGZbK0N6! 
z-Nr4xzUb%Q$wCZo+HtgP(&+r% zP;xGLYCW_M2sz-iv~vgO9$Yx~OmJNaY<27vG*M*EgHV@DOkW&ZZL?@Km-=9+TqQ11 zVjT2Qcz-G%A+geFHSTnPu`oaOD4XDcey#awSM6$`3Bi_2q0Z}aP zEOUAT#ct5WsJr{phL70(jrBgbvmD7JEQtTe&*I++bfu0ceCqxvhpbG-o?XGA=;|r0 zp)LS4>BBAIQF zrL3rB*K^m+FWIdt>PnaJw5l1%kqYC8_jf$hzDF9L1&g8qZ)u+z$3FcEJwrIs4ZmlB z$3ZhrIxoxN>Z;OUx-nbJNy+qbioihTcqEzK!&ziS|{sbczBV zM<%s;%bITXGh?Gx4#fBz>bles;PeSOd+7aG-H6$=W% z4kKyDR)+LGc+O5~t#u6Lx`eSV@J};%^gTJF3}UDwF~bm+kuM_Xg-z@d_SCZ8;fuFl z`Ws9+jz5mk=mo45=*Y;yQjRQ^Shp6Ql{^y!EpU5BgX$z{f*h7;M8G{J?N z(KqA$&p^`;wLZzoAL1@0BGVR^ueNYtE6b5=B`6M@IriZzo=)k;PN(ad6*1%V4ZIS_ zUc}U09#f{T$BL%!zMDC>+eBAP)%5iiLAjgRvODOg+ZV;n1x*(JZ8&Cky0`#W=j*+gZ0Lu?J*lwTE4(3)wf|_Ghd-t>c&gH*+*N;+jW&^{3SuV+$1tA?2 zn5SI*D8~5twEc(e#gAj()vT2S_n&Vta#PSRM}@t-kwT-|`W4#D3^mqF$E2LM|Wzdr(w_F&7|bHigm{Px!?R}2tvu-p2DdJhJc zYwtt9>MGBaEd;p;E7lHt9qmOhmw$JkcSx)HL_?z{skhlbLpya1g$-%741sPVG}`i` z$mhcvgZ#Ih5N)4~%}6$FBk#x^DogJ{HM9ldcNo=G4L^C@UIEm^m!>NFZ>5J{>rbpl z!#)0;uK77hp;?p&`MLiC--Kzy99;3uP5f^|hel_M@zf2~D^^=R2HRkXB<@w{!%*4y zVN3a-aQVH#&$6hgZuiacT5VGJ?tbIp1R?M1^&QFBPcG*N5*O&qy|U`eOb^m_Kf0o< zcmaVOTj2r`SY_DD`nvotC{q}h=#RDq^%MlW$F^ISWKJ!9b9a3`OZ9H^+4Ir#%~V|9 zG&wFKZm`8C9wUojpL%;%^Jh56Wk<7Ubv;=+AM=ab+57tx|KL?CvXCkPofPV1Z?bO~ zzJ?(b3jj#c8l{s=L2*$s$brFJM|po*+fupnK6b)}Kkwwu>Zk54OL=Jf6*OFTP=PmB zF4`iA?g-@j{sEIr-b`08*wkPa_hMS2=mP^fR!0B3v6s2J+fLf4RIFUyvDLMu;wf(o zBw;Y{zn3Nl$Y}I`8}|5lC2!t2weT^Tu>9HIV7su=ZtT1;tp{^r%;A44g;;;KDsds& z?)6k~I)8T~lpLqB6{UFA@_+qkYJvKskB*Yxyq;BjU58_aSD#wO`OB8syo*^m`loa% zH77!PhiD!h&Xt-;KG9N$S~{@MaI(=Q%-}4WVE{r9xvb*BZ%_6;OFuqViTEo)G80Wg zYhA3ZZmPQ@M}pd!sriSpl`5y##lDk146U$kt%?u7`kvt~CXn431ST%4MDJ;`=f3|o zV7~3-tSls#`nE0Xd~?C*9Off9k4bT1PFL`P^<|NLn;>cnRC-zp@Bt5jajp|%nbO+G zC6~sQO*%M5Y#!L+!eS9+BEeXCUybl?L1|XP9|b~CUpQ2+vuaM&Su2E)CE!By1K)&< z_=XNjt#kUNt^aK>^68yrZ1y==lY99980W4|c5YSScWT<-mft}$D0YbAweW2AxXGDuTDt$-FoN z-@Y?pDGc~ipt&><&Cuo!CEi-UNrU~O%F`X^)78{?s(8zZ_*=i_9h(T?!)M&lh(g(+idx=JVP zjL`}a(&*#tc@EJ=O&)U)!klC`Vb=VH-O)wzO0B?KryN!!2IN#P>pA=Z8TPp2r~+g0 
z>8b<`Rd+s{S_mu+DVc5i+k=IUeD&Xk$2X;#3FKWu%C5+YkdgB)oq<)O2?xmYSVN3f zUKcZbPpVfSKtih&7hB65;umZQ9yG`4oyD217JtI*hzM20d(oFw^g6x1MP$0EtuysE zw^;lHeuRTRO-G(t`mNwX(`xuj(Muj3+;M{Ue;eEmpL>Fmn4QOTLE^`hhn)b{rjT0K zT9=`BOk9l=NmxMd2e!66$1=b-I}X>#cuI|ni0K~SP?f(ifsT+m~KI6FJJV*c_smX$sA8r<(;$4Xr4&P|rOo;eXa$dViIt!wRFmtSBn zwKs2j7L)d`l zy5xv~sm8e}vnNIRe(E>ebcs6o-sOH{(5@&^=5}&Q2_jsutes#454U1c7Dlhjy~N1W zHDn0u`=iP}SNS#|^%3RIdp(~U=LB~@4xPeofEU@&lG!DG8b*zgQ8**&6ol8`>|F|Euzjc}%)Z3(Qk>1yV<=$Y7*RsViQO+?C{e zAS{*6BveaRSxR)-F` z?>?HK#J0UskBIX2DDx;q@QJ#c@$uV=iAs4mPZbd#Q9c!>Yf2fJ1*)@ZSqWTG zXYX|{h#qS2MLplz;3-E5d>E%&kBMB~9y|YNW}VI#;8+rRYkY9St*VIxlz5p1I7B1 zvnU|m(z=3MMPm`p3nh7L@yh}z9;jx_)xbZ13i-`{8+^?n7*sh@+F; zp`B@ZecT~c3)KgE$)e;`y@5Yh7^B?Gw<8<%a&g2+p@0Fz_B0f!Ok&-jSgNp8J!Fj< zb9ZZQpzpTR!kBwbf0GdyIC_$v&1V@je>LzaxjpEo@)u`^g% z>tDdxjc#q!0R!l%m;;+h>wkf_x~u$?A&bQ={T@Rh;b*auZMngT0azF*0K0Sn!ga>y zDZ=g^Rbq##YK@RTS9|%9(m%3)W_OKN<~cDMplwVG>@UZ6JmR{7Ib|$p;K4wSVj&BN zt3XI|?izM68ZhHw3+}QL(}OWT1wRf=<|o}|vDgN~JGq4++9F~HAcnh7rM-7Gzwm`{ z{_P4j9n7EmNVPsm&%>h&Lmnc}DN6n|e;#79>*kM}F>3!Z5T949^^ZB-YoBYP#e}4` zh3~2*r^m`GZkCN4vb(%Kk3QT(^4WN5<8)Xv_H8L;lpVHj3Fy=Kt~1*cO;RRRn#=9@ zBhA&qxxC_R=`)b(@+gg+N?Y8GploCT0R}$?CTby8_m*lV%QM3M+aNGq96O!>K+$%# z3t~jqK&g*)ULlOSNIwm>1ikj_)*-4&j!@@^M$vU0M;7V!*WpEJ1BG*xJu6gNYf$^2 zyX>@N|BfDq9w%oIK)=)=k!JxNvlJTkxxx1MJw;Qc|1f0uCt;khCd8ZR{CN8VTWAiy zawHXQrq3p7DI*UAj(r-P78q7 zp}p|?S4?oQhbDFC|Al4nDvcq=>#x3JAp$T~6mNUq*?~sWnQChywR&HCtNQWBPSm-? zAhKK7DOhNuIwiqig8f?gkGd||TZm0FO+J%kaSY{dS(+wy9qh2y$3-H2S7a>7u|t1# z%_j-Qez=!;^?tiuCw=n^*O60evFAiCC%0o9^X9xVgZDn(PGx^{egPqnh7QwKnP>f< zDYWO_Wf|6cq2>5g!VPnazc5_z8St(svu*`LgvTDI&mexDF80qlnoc^&=3=uK8GSe7 zy`LuBx^(QYuK5#`i!gcxe6D=JVuqq ze>7HASu#yr?1ob((XbBqb_Hq(G5<4m2DYxUz2!8CLcYM(GX7D9SIoyo#rrb^LvgT? 
zHOECM76~=*XbIF0Mq~qT{_oeIgMHj;W;kF5&Q(ppx6)_dmTMYD?taSNE^MqY3b(h4 z#pfl8KT1ayg7r~HuIgQ;YoR^W77-Iwz|azGM<+9Ap}!5Z*d!b)n07cO?^tC+ei6nf zmH7bnaGv5)uYYy47{LJ9#aWRgogYd*WcuJhpE#7n9iR;Uw*e?MOskU}9Qc#vXp>0O zwMHk9OZvNLx@o^$sV#T>PLU6oERt0#%wWi)S*)De zYyRJcg>phVQn1#wBgzAN}e0v4&)xIEdV&yQ%PdzUf( zg!?cqzrZki87%(BR?2TZJpKA|GL*CsFZo?^Z~!_qB8#L^_6utY7@PT4qZj*Z-u(`f zDb%uF;ugXNElfvEJ4O=*p05}xFA%QPgHqCb9G$%w$IYA!a!{~(Zz0$H64rPF`nPUj z8QQo5ueCgIWW-vZlmq+h`J++lYRl+FLtIx?e5zR9-DX;`j9n{t+M2@>-4MK}OMlj0 zg(`&xGn86?)yq;i@pX1RSMAS~4No9p;%9=Tb>%yd7NeKFtQzt^ud&Bi*lYOH`y|JJ4Glz1asOvQ0r_e+iORhcM z@jyzb8B4jN9yM>aWTvc1DjIRX+D>B*B)b62)T$l7OGr#RP~8uV6OHS>11p&4R0I`1 z>dW5}I*aH%Uz_xLCJg9(d7K3+LmW`6M7*F9jAoxUeu0K^U;%e}u0Ej6KD|^x);Apr zCK+4sy5o}k<;%S3!vP%cT%^`o7i_VS1a8$I&0%Zu{43mS*QUotrw`=1OTLZV)}wfr z$_YS2v3s49*Q@Gj0w5-az}lUNHcHbh>n(kAQEJf;8P_*PsjpM^hF&OhgHL_t;|~r6 z;-Gzc0ids9vbZuiXWdg9yJAX%@51Q~;Wkx$#df#rI=mA)JWykkeUq-lR=u}Q|0?Au zMPt+(ECzDdf#&p|f>r~}G|^GKP6SiJliJ&DYBr-!z3qC2(eAGp2mYiv4;r4lV1~m- z5${(;S``7pLDauAtXe(yVLWl#cfz<;vAb5}_(L1EyTdlicGYY-tH@}Ek#3+?Ma>Gg z^Qt}CHZ;o7AK^68E8W3Rf>jk7dlWGL+bq)51xVIB+R3nbC)Zj|1o~2}4Ew870i;lQ zN1o;z<5MMJCBEJ(@xP-w84fF7zhjax%eNVC{r~s*Xd^n&#!YL>4O^O(Y23CX!=6nJ zgV}?fMn>#Y$p%J;NVE|QV)H`~j;;fZiAy_x0ib+nhXnDxGIVmY=f^m$o0Ra0M^S{| zq+1HFtVoqJWs3=nWT`<^>iPu(uV^vw?l1%Ez-8nt8NT6n8!h3}gp3FN8i8+ZhbvgAWr3)XV^r_1#R(&$q(Dmf zJwW231?*7WVQYj(HceSvOeZ66VAKvaE@?Lr#|5g-fJMk9E+qWc^sa^2ZNoset%m@$ zbd+-);KY#0{ykpv-7AkjPHUgWwyi;}Zv8gyvofIP%z?J!J(ukuf;+&Z@Hs^xX1V5v zKxSM3(zb&BdVBUUGZ}|jtVBDY%R63|x?%ERF9X6nXy>?6%GE?xpX73M(u1q_KU1$w z?h2A{Bw~)`qmw7fJj#ErDjGK|5wW11ERA&1?T+&u0dEvP4G1jwHFi5;jD5p%I@dYmfNyd|J$JYR=3<<)7w5w9U{oIJ+eI3F@^*#=yLD*mz6Cg z$mhe|bBV-ojqSYO5P8!H-y~N0^ogrVcLZYvG*tHN7?4)vlHS-;7PAIe{<5|$$mZG+ z6R-&_CSex0aF|dq3%Ua86q88SwrrtJ0aZ_6qaZ1Ai*V|Of(Lk0s^Z~|0l0_5o%}md z>)&$6VY&K=G{HGwZ2OrCD!!ATP>7+#*HkGRJ5K9Qa_WudLif*Ll7XHJ1oOq`h`;TY zCKU62z|QXofOfCXV36Lj%DtIofAcP+gf`u3h{&>v#U#P`PGC;Z8XuGnK3it8(RryD zQ!XKchGW>oR09&<@I|kHk8LY9{chpkF<;lGC@aHtEZB_8xry;yQeddyy#e>C&D5eW 
z!=mkMjzHxgXK{xM9qfa3D)l0}S$-5WZnVn!)1>H#2|T*BRc~j>B122*u25S)PeUxYWGnS_;8Ir&fI$Px^ykNw6WIIFg+?i>l< zyW*tWm&a9^D?D^s#2cXG7{tTmh#hxsUZ}A*fu~2C)uIl)DyV-*jEQJAu3!bD*$ zq|k5rhyGJ!8Yy7#`Cu9^rIpqdT1BB>z}`aYxY;aqf1`eTt(9HJ-?gktW9b3v92-uDC6zuAo^b9 zB9(MC9(?b(m+z$=syt|}sM?^_sS|5PAG!IX1PxVhrj zC>{an-$8w{{NR!6=DE_Jo~|ET^RvSfi28Dc=NlPpI_*J>L$EP;Y3Hbo1DF95&C(d~ zqna&P$$uL*obE{=blfLfkbk)@Z4f>!ZEUo%?lt=xJB{MW@MfdbLVU>6;I9Fm{HHgJ zH=EvT3&?)!JJi|`|BxKn4$(iwBQ}53H+Rki7s@|A(^P>}{eTV)mmuZ$SgL_l(vNd% z?jp(4h^0LR?zL8kd5%L80VSZj*>p}BPY;m9t;f)Am&q}^!xWDGgC-^jpc4B!%m6@a zbGsPY#W8POwVzaR)^=FIi&Ur8!4)~Dt67HLoI@^{!O0{dhB6U_H3MI@cwUzk?7wkL$7DEcl!p$|K4ei;Ne(eWe$EMXz8D}OPh#IqVHZFZ+wzTUC@~sf z2~~H^vF%;5^-F(g@zy00j{aJY-hUg?KYRRSeESy|S+ut>F;*G2gc#vYZim}2mn5() zA2!))5~7cYyD#WkVN{9VEhOwUxI;;iWh-DZ#|~R~QE2H0Rj!4IMk6P4QW)o>*s%Kt zs0VslJf}7h9^`ryUZlZ2Z=Bk1dE|hO6c0PO?6Pfw;S__`VmE50S{<3n|MK46MC~X& zu#Pre+dhqXLwQrx5bK2mH?z(M9W7BNW%%EjhlD{yDicQJXVpEbplwRswv$mA#X$Ny z?N$UmN>}XVAQz)vD=mbJwo57YqccWPP!5P4@GC*etX4WHIZk?+bR}OuMyg5-f)VO4 z&)S-x4x5&doxGR!NPU@ihPoj_wT`PkL8Q%E?m3?zJd8RjFoKdan|kc;Fzc zT*II<$C7@+GB^-|@#WH!SSl9}s?^=rBmd8Y?!rv&wInPek`5c9q>MVhY~qp^68hTu zJ~GW?G>}jgXl@4qo8{gWUoUj|n)#+S965q8BTQm>z?QLT1Uk511n+#8pOR~q>7X~2 za7OEZ)!8l!6@t7s^%YV=#=t&QqK=LJP(`iS1U3 z;hTExseEsVyvt=qaT(gvW>%6r^R#R_GyvV8{KyAU#$B##b>Dm9!45Uz`GoCH+UGIU zFCKh>X2LCnt?v^mng4}sx`KiA=XDyepS>*eCM&0rADe zpEDDZ#|~tjTZhzev)>Y7=yCQ6l;Z~fMt&`SM4uBeac$&v@UrweDoEgvx&F7|ylpJ^XQILXcwm+A20mzQ$Ezi4oV61O^%kGm9&r%75=P@+8O_xOcL%v>JRbWrA~; zg&?jvgk2x~*ma^{2gnFoHD7<7YilmD;^(J|G^M7#$tGmS0ozRa^zI<)1BY z0t>4|%Wnt?pn6cdKXY!FP{!3^mNSTXmW~qV2qj+n9C+=IqxdEcT&7A|5qx1TiJ!HY zmycaP+Kzo|87U4c&q4rpMIonbCtR5&VP4lsz1F#R{T5oCifQ8WFHf&nJmFnlC9@l$YMH^4NjAD$;H25*3`O#BY7}0V5FpiHMW7r@`|E zmC_VsQ8u7LysnID%S^a_3+vZi-?!cv8F5^quMhr^hyw@;Aw4^BTVyY{+z+X%YNQiw z=;R-21|GW~_qR>U1l&FMe-cZJ$eDA@N+)G%H1(Ts^RQC`BF!F+!o#D|4gG{mF>W~D z{qlwzCI_r@`h+j6gQldO8cBhdsI6Wlf)?H_UOw0yN-~`; zSYt~hkvtjLq+{lb$6?MH7ay~re8XZYDrs8q!A z{F5q&{kjdpgEM*?z+P274=g-=Pl%-BTcqUEE&ZdH?WZ!1BQ 
z1;yzH*Xi<$^x5@IFeX)7@>Kt-z4=St_2t+i5?Zm4&njKM8m1p#cA?DV42T7j33%sD zt!JA)!X$eIh!8%HP9%Ow;%Lvj`)|Y9G^)E8Lrvo`$aT?RPOCm@|CBnQDpyARiw5Ix z@h93fFWxy?l-1~qt!>~bLDZ=%t{S-&y|v@7hn6<=;>Rw7;#PSxg%ZLTB6>`2A5l@P z18ZOug^6+Nk5QQ-iTbY?v{@}xGr_Y4O@5Z9MkN~nv{qac)b?-p9=-U+28Mntm!H*! zKfPdkB|)7=)d{6)C^uHhvQ~Xk65U5g;UW4(j;|1}Y`N&ozuDQVNX=!e0tY?bH3JB|B30KHb zrb9GeJNLV*$pw2#0PB~(X(hhMbEwAN!*}?Z-OC}}YuOP)ndc)qq1_%2_S1G&TnbZ% z=o0FisaU2)|2SfT0tll0Kh^Tre&Mb5M24cmGYB<+ib_C=_+y686=PFQsbZXZvTaMj zg?!y9{6JV9@>)}mswj=vWHTf8?67r6lV)c7^VV{0{c$gO^*Z%sC_sUEmv$~wGf7H| z!R%iNt}YvPA^My_nLHzoro^B!(N)+17vNwrM>U%+q;4ae|9C}=a!b|V|J6P{zWm^A z%^8Z%5$D#5f||PJ;H0JqbB&y#(oB*v5>!QR*RX?+J}9rqRxC-Xxs<7zO%tKNUe^Hw zPq6wn37&|dE@KvXkZ%i>kh-@wtVLaq^NVvwB;#J$t#QWE7vcLy6XNnu{6C)FJRHjX z{~vcw?~~Iyr$uC&)2b4ZWH;v=Cr(LoN+H`MNyrwm&fKSj66TapwyDG<%Ov|YlPr_O zl)=~s6Jw0Qn8mF3`M&yG*YEdFSGwXd_kF+a=kxj4p6>{jOIRaR1K>jl)4=Jt_i^Z? zO3YIT)5BD)ufUCch~0|L_IrwcJ*}qMJQI%g{BtzKfa&mQQ5+B(8fs!Wv$4|6HOj1gsY-%?3Pu|BY+b2&9i ztK7>2n*$$^0;%N#!takw7#hnowN-2M&T(2GGnEEDC|{VW7^(Sc#)sR$9MV z((VILG`h^!T0rFd59cKhPn992(QP_b(>THj<|rzJQA-l$Rh(hLo-)E>E0Q^I*QV)e z^W~_#u}!5{Af|NfS6>YqbMCTKz{PylK0R|O!DVlqNBbpg-If-Iq3~eS2c66Xj?x|N zR{RF)1EMdE%-*5BDjgO4K&cGnE-SY_d|xLBu-%&6&s9dXksEAIw_QOI&MD)6Jd&Lg zgc!9QAxPE$WJPM%Wj+vhT5c*UEuYTzkuRft^O372GR^G1nH$&gJ3CPP#o!QCYu`db z%DEIZ$@pPLq2BQdBNnU5QZ~Vx4Z3Z+l%eKxW2ktOWv$r5eM|)PTe;1o8j0NIk3%$i zZlm(K$>wcwQ7jOCzhd0RmAG2wID57mbJ)souyGI3UsKbwxtr~1woF0MDANl;pT)urjD!H#qPFOVx5>S3y+0pV9+!Zf??zRlUtZ$==P$WDo@4_C+1oz6)IKXe3cO zC6HXN%I`sD?Z>p@3sb&#PF$vqv?A~HJq%~PA(5|$(UHM6)!vw;SOlHhz1K$1*TH&x*q5t)0Eym>F;TX0mvr0_UW7M~XoiT@`$zHfJ zlJY0yWA;4YRK3APZZOZ`U%eX8& zgL2x+qbF^L6wbfqyMT?QYBhx>V=kxgTvGb#S@_Br+CCI%mGoP`wbrXq?EC~lW66ro zQ;-u)2er$#Jw3m6f?Jv6brg3XkALvT{6KoaL{P2h2KN&~5Lhl6?d0hmzFPkvVy+${ zI#YKm%~`dOEoCKquG#jP+!m&}=)=Sdk{2@;Z8Rj`aHapXbyS<)icUl)WM6m1W6whs z;qE>N6^1S`&WK0Mk>IU?N?IFgBtN4%!GTXW$K*Si&705}&(&nEck_4HZJNnh$2u}7 zUP_L>Owv;pJOz!j7yjAQ(dcTv`aQmPw|8=o=*ij`LIyokd~)}4FwHkB^q0voHS(>o 
zY%X^^lS{xz{CDB^w8&j$pl$k^lYyfUNTfh1>jT5s^M+;A_$}71t}#_o0#r!xt{n}C zD;KN3rEMmo-y+X}A!Sh6S~wd~=7>xza=ECPmC0Q)D{d|`vQxT#n0wp-Ql1NdTM}TP z7?HrXPU1ULVbzALLoSZ6n!QeHX;tV`3y~{bjp3@OoA5AdSZbRBA7qne`zbAmBp3nr z)M*Y5*?zTL7Zhr%VjZ`cZBDB-2xyH)SN zYH$W+z2Z;uamncwYUgjz$L=W|gx45BwmINVQ}on7kT-xFgfFKrGkRK1%$)9z;`+sS z%9CvY-7=@X<6+EAcPw)-Z1|?fhY2$ICTuLb+*jv?{*4 zZ&!Ux0GV`trd)4JDd_7GEA3;D9~p@rg&S=1`J9Y6e`Y^QmP?4txm@vT(NoLc1?cY5!4pm=qK$_OO+d0}6Q% zH%E9u(7lj#w$>$CXdL5Dx}GatYK^jz?o~3SRAebzfJ(pOH7YHr;uWH-34%w7W4H@j zXDk6xpZxz%c&4(BYpg11iDq+=em-_0Wl1Jm`>x4z)w%U@-!l&autUq3Iqdn-yVgGT zao2<_n;4NsAJo-gbN$B4deAz04Je^L=-l84dhPMI+XJ%llA482kC|h(d6mp)A^zx4 zl<^JNbs1mKhmi6VO)|XcpimI4*}JiOOPh{TF7vIg*I35?){?`!aD_kwDlO|M8w-wS zVRfW|uW}LuVlgP3fp4(xthT|KD2pjvXXfF*vksjvmh+6*{k^7ds$k!^NOEZAP6`Xg z5r8uCB`v4VdI|Kj32X4P&e-Ivs5vUY1}Fq18=0dv$yvh$b{yxMp^}Z^7}|i4w3c$L z{oIqW`TBY^%5L_!^@oBpC!TwqBXX3d_O)-OEKnGLNt7GI?jp@(k`@*Aj%BLPRZ z3}C@ugj|{MO?anGdEr@dK8V8L!m$%)(rvW9oa7ES| zm7&UFR7mK<|5VgKb{JtpOm(N~MY=U=N@;bfeq|{aRV{QxNwT4iqKkvVN&BkGh%_0? 
zHcOEye7@qijkPs)`txc_`<5)1_ke=(ixulHkklT)pUCR!$>&{S003!beNu}X(51iQ z;1Ve*r$4N2)3&s>t1mp%MB4T19?B;5)d#IF$bhl%sh6=EF}2g@Eoi`me<}Y~k|^~9bZD=%N%EpSbenbMLPyTq{Z-h-f>T=bLgm#ncUsfNN{O~j(j-bp5`PrI*~^;)iX@2FLPJ4(6U zI(){M@G2OVI~6^Q@bt=IdY)3hdx;dBaBJ`;{W z-21@Xk(vsbWKZ!m6sL}w)K@Xp=;!c&o!!L?6^B+_eM>f5X%}Pps7}NK_vF0kJaEXT zRaQE75ynQpX;@+l)_RmLiiLWv`x6T%jK`7WC0lil?v}6RLYwu7gPS0UjQ84Gx3z6; z|7{@CdT?qb@jc-u#eouu{`=f(CoSzC1+A?*DcRZ!xos>=tE8E6dDk07R4zT)BLTH#3K|IucvWRV7$hjtjRS{deTU8VXTjx-7lKQeBLDR( zx~@;YmZeoVj?K9(gH0*QAlN6Y^=*S!ORcRIpV7@)V_Jr&EqO~9r|=g2$wY0iQ5>V*t1>~|)NUC1nxL=$|=?925p z{ikwGX=%h|H!iS840jzb-iYe5cG~|fEnXIfST%;#u2y)CDsyy0!7CSb@*VfWA?Z?_ zjueSs+&Rm$-9)B-VCg$<(tKNfTfONs1E=w&CgSI09oyx<8|CMIR!tMo6<|B_66IY= zq7Of(lZNOtZ1pW}Cr&(kBz{PG8>pTSor|Ths7EFK(^#_^;G$3MC)hH9vY-i(R4<0cnE0Us6R9b+#tl&2#H<%21UXrUC=VMLcJ;Z&b_6wyy`q@|@~WNPceuQho;a z#a;F8ffhQjvhMHz{q`vZg&ds2t1-|EJwoR!5zS{(^(2JPR|TIxx=Enbs7Thddx*U2fqcADSAeIm+jw zv`7+A8_ikF3OgNsHmV7FEfmVGRxOJDy86Wkq$`uZo>+tC>%Alk;Y!rUWH)y0zrRPT zXPOPLQ0!Gj5WoBb%B_Oe1QQsd2zgKv_u?mTK(EVxMh04b{ijcCu5EwESj+~Igj1cnfuGnZ<@@# zUPt+y`t+op#XJQ$fN$;$4W?IW+-q~)_xw*GCY2O{~yLDI*H zvY^Px()u88renK#>BY_qG9@My2o*-jphY=sUm0XJZ z|H46}B){&j9xI%Z0y0auFlbYJrkeM<RVJc^mmx*Pz#zmdT9yV8};z3Ka9f| z8Y6?8#g%nc@47=QIJ1Nt|D8tEsZw;Ab3KEp9h4uqF3f99&8^i>qX>%)Cjmw~>x z?$-MJRze_6t!R{=+BvB+bL?vjzU<_Z=%Lq9UP;DRS7wTuDDKI^blD{_k|AnbT`W}3 zX1MiFXKbm40>CUoY6O#dkg#j4HxXT(>YORYhPZm8bE}Db69aAO_JUqRrH}F$`DDTE|`EQB5{n%HJ&1tvd zecw_O7+o>XW{6v9Cf%zyT}+fO!RT=S?q7MAjXW4yd&@?N%fF2( z2|}<^z})q7!XH(~xtiB7V}C=`y5dm4l_NqF8B_d$?_RD3t9*r&e zAYP=;T!);vn_U?sP!+#dKCLAr+U}_yGT2+V{DFL^v+Ei3QEu~}%DVIYG*8fZKA_nt zK|V&Bg(39x`(s%?=t{llDRu6s-+jioI(yggQ02=rhd=4h^q=sKAN>yjlQ3$ggB3+faT>P2Jjh_x2ku^`w}$3d;R9JL=TgiV8^c( z-&`5|cMxZqpv8BZOpleLVd~na(}UDb^rjob;5|W^_VDG|?vyM~SNtsTg|~t0_Tg&6 zu}Lldsaf1P_tUF)P2gdw{cfePw9xkH)f?U?7PxD{{!9gMJEv6R0xkix+%{f_CHT)` z!alUQHw}tU+HJz;^eG z)c_eg=k|XRWOp^f1sY)g66`6or&Cme$1mHB|_0-uL4JE}2@ButsZ7`){C zoYYHI%9Dt=++MVvXo0gD`GZ335u7X!vcJF^G&*afbQN>3E$p2KXwtx)K 
z>GKT*2!?FqMZDgAA)62TD_|3#{>|w-h_Jl5#*_FKvW3Ez7H9h(_@UT%!4SndJSY;! z-yL!B+um(bRXs4Sy@t&>85YZ!a|t^nwWTqRp7g8Ra8ue)5pbQ#szKmxuZG~-wo`Dk z42MaxkQDWGt;(wpEW16%MAeJ;wT z7rAmp&@J3Tf(f78S}yTk^G@xPvE(R6IY{njDcCk~X_`_8YN7f-i$~~*(c&y>!D}yU z3Af(H3L*?q&e>`W2Q(qtpzUa6=6@>S5hjqt)H&!}P%qq8w2&P(qRh}WaA1j4R}^s8 zxA0oaVYhGQH_^*Vvbq}RTPB=m6^DDmVh2NxcU}2UCEPtZAnDV8Dod^l4W4wsFjqOS zRg+s!St*#!DDz#PG$p^@ zKo6nJ0NP+X)`-AmkW7Un-eLLfgg^L{f0k{J#2_xnqX>zh9y`Cekxg0srJ7EeL&$tN z++0OAokUH!*_T(|n-KaFL-x1a5Y_&I>%Ar6ord<0V4O3QR_Y(^^oeeL`v`Kc2748= z8Ci0q)13~N&nC;hX?+m(17-A?w?3f9tBCSSeKqi85ckCEb$|J$i=siHm_Tq|lX2QP ze-HR%p$lTem4x7cP@WWr%n9Ukd*p6Q3E6@cQZ1CPw1l-?Z^9;dAKcR`=91K9-WwVl zc!jMNiZFfco(<`wyRyP9?sa-wrJm(QB`p>8#iTOV=vgmx{GJTtw%qu@L2RowLa}45 zeR?^iXeBf;QfK|AWA=nVO2E^*(2Ng3^3<2~M=N^WI=$97o5H#m36w*h+16Jw;{DUY zu;f~4r~CxW(A65M;W_>#$s&zOBF@(#`A6Y?7VoxBPpMopXZft83hSHLT&R=i7$*2SM9< zlR9449+D5OUF#31(5!}wdbr;ti%t0}Cp-Qw@=K~a*h|D}ljtu+0-I{L+LViQ!;Dg$ z+KEz-NLCnQ`%LCb0edKSN~WikC`bNKwK5$%gxrUO@ko&dw;L!Ne#g`{kvf?v?*7Rq znAWpq^T+g8Fwt@C2 zjlbZEg)u$VpLN6Ywx7!OmVn0%u)=pgS<>)%zThv=UI z<$lEpK)hOD%^VEItx1Uyy+tXYS)d7&4(s@@L#_&Z6)3{KG^lWJbM)W9=?UZj7*HwB zB(HK&3!tSC!IT%&Axe2^S52VvH*3qmx2r`omFkD9N^_8=fI}7B7>C{{P-U(b{zsWN zX^N`)H5+c+X@^-!#&_j|Y2Ds}5(?>2;rd$5r*SQW28-ZZGX`epmVwIO4-?0*M8;hU zYXQJ$ETz{s9jeVCT%siV8uWf?sOw~?q8BXJ77-;uBjjHLDWk5t;==w@`PZhEDYHk- z9e(Z4w=t*nH!!z2`W>uSYe)`A#x-Gy(=^?Yh4?Ag`4KgXS!sx#+D0eW%EAL9P_xg= ziMNcnNv8>N>ZkGEV#UPF?Er1={bMaQ$z^cooldL=I3MRPmw?mq>=_qi)@w?tA0d83%@@Q8IEC%>RC$*oUPSQ!>a1ibDHKM{u|jfR9&B4jFT?I`MV4s*1n zoMh8>B@mv;*4uVO68)f)QB$?Oei-Y3r3T3VjvysfILCNj@?L3iHZ2x9m)TOKw#C z1(%>xP4gngssmf?bq~V5VmIK2GnhjRtvPu;8oob=GL<(4Qci1mmFFKoJP*n^0R#2$D=q1+ z-{Ne%OS!XEOIKF-v*IsGDJk^u6C|ArFoC%7y8}IpmG6nJHm!X6+J##xO4}3#pJ~H= zRo}E)r0Y7ZjE6F%jP(2B+)p|;u?KfuoLzE(DHTrmw{}6HG`%qjhDv%pSIQAn0zq?G zVp~8V2w0SafrG2@q;cvvUmAB?Siv(q{4%q4SqnEk@5>`blaD=rUb36Hn<*|kS8-;H; z^ck6r`UTn!=8Z*?y5(@{h*MDfQM=F$_lNkM+@(6+(oBy0ByZwWthJ=<@RM7km%Fqi zBQEM{qH-hUPqSEM*wb>?A+q2p+19yNg-2r#;0P%UjpYB^W9g)~>??RF?1OT=AzJAh 
z)5R{^Wy4+@EY{Lf;h?TT-R7Fv_}tUiDkDCPy$U>&wzWIgWa)`^W%D4o;y;yQ(6R)M z5zveM$FP?8^VH{Ww%JU?=+>m-LXq!LeSVrNX=uqnsel8~YP{ z?RODHuEQ?C9m8da+Nb=}7vWze`VM@1f}t)@6w99I;Ha;CiSvkXW+KL63?ubM+l~XL zK^uoEouTZ7uHo;49iMj-&@C$m%fi1&qPutih}1io9XDw{Rk;3#|7}%y-xUX!O-z2+ zw@k&rWQb;sK(mb0VlH^1I!(iB0x$+t63_b0X^b*+r>MQz#(T$hVp4Q;LRq7dGELfk zex_0bKuwnb3~>~pI50;Lt0w`O7fJq8rw=rWKlfEMn|hiI#<*-0!ZU)Ai1~e9R`@Jx zebi6i@XHY}l*7WDm_K>)ad824xkT=usHc6igzy5ALrqh1KkDG+G;g}nDzckZ3GL{F zY{S=M>|3OJL|K!V(IA0@ zy^lRVtH>%fyL9RR);apN%UBSZ@+~fMb;Re*Y7|-<7E=ta{S)(7R9rBg4lV9^`b_vq+L_1aHf&!k z<=Lc!+VduVWpqBs=|Pggx1X*9BW`oNfB%b&Uq-K?TID_`DFKZHT_U~*m2DL%*drd< zT*5xy8=ir#jN|8P$A60-8X)f<=Mm!vX#_Y4Q#laqzgyxGGuii$sro)YJkBCH(2`sr*+jeH2eLLfkwwQ=CvzD>w z+IJ)jhh2BHKN|8%R>2#ew@>O%IuF)IdHXvJ?jJeVq8R_X5@Na_ z=}8!@HFkA8vvbB+-R-Qd`ZJ@7t5pW(<@)JTOlxXK?M>U=)dJf?GoCqjZjHP6b%=09 z_9;c3SNSgAbu81+oW5GBKt$glDnBpAWROoPOeN0Z#)OC&xzjIaOhQZUhU7h? z>CE~%{Z&VCld|+gXu4)rNz#;?QH8Z}vC;C9Q6H+-RHZ$CUHX3QZU6lKvG16LB*%kq zSUuy>-0JH+FgH-E#U<2CFc;J@H(Q#`ZQ&8?&4mr+p19bqV-U@IP+9w4_s5mB|FPa) zo-T>Mk5Q1>z`Mq)n9O}u zUK0cj6?Z)I3OqryC)IOBu3tDUmnzePJO@G#bhD8+KyGaK5~M5o)TWnf^Bf!;3@2lJ zD&tlB8ZD0RNIRaK_pdq4m$sWTfZ!%#B5-}Esy-6mc81nbXjT}yRiX#m8eBoHY7Vvu zKTc(KTVK%~T$+>Co&I29>Ulf-q1)#Abt%tD*S{@3p8YW0-;(#$Uf(I|-8Epbq~A9l zcs+GEK5h`C0*nOTG{T6R7jq)s<9ewq67x5&8x9|8HTh7uWhLeA&GH(= z#(-V!E{VZ;Hw=QGFJM8;0&3c0^vT<9Kzdp{$)wQ}m#or!)7D79G_pP6`ezCIm(}vZ zY(4$y3Xc!9@ZdA1BTLi$K)mu&JVo@&h!WA(av^N(B6wmC6MS^fqPP)ayf^NZ z5J}fytGUBNOQW8r%$6!q{cZ^Fl=@YmTT?`Hb;q{RHj+Qo6O}&>%Uk|@z1m=hb)5hK zC*$Kk9->6yCwz4$M_4vk3*IH))Elo>ug)O_V81EBJGQiZZ>sYg4mbIX%~6_HmVHV+ zwCAJlZ3i1DM>-#r8$&4VF0)*!<`Rc$x%CQ*w$xG$xqj4tDh7ZWU0pD9tW|+tt)gwD zLN=YiUkpO`jIX2#@a-8XLfYzU-67Y+&z|kfOd9b5T9@+8Ww4bbv$eu`g>#fB&DB8P z13a4=P;_&I);I0QTi+>{XHRupp7;#M-`>6i1b_QKXhOC*%OVJ_=$oxt5p&jBfgDsr{UkPSwS9FO?)W0GYy%k%a8Lk|# zsoXIPdhR%z)czvVZeCblQr=RlS~*D>W%}#T-Yf;TF}CkyVdAPztFfZWJFy637dRQz zBQK)XQ|t&sE&T$=yMue4^cY%2NFu+1CE4yZxqtn8=V+I4mnvr&IB&w4q67o9UvgVN zurP@z3S*YG@1g#lFQA@{WT9A$;S&>ocj{Dj 
zLs4UN?t%@!E4DYGZ94sWzeV@Ie1c~@zYU;MQ6khmxD%@#9&5c>nFGX3|8}8hfdc+h z%i*|;ofk;O8(RUWuO@sQ znwp)T#{7uuwz-AdqxcO;NY0eS#l`s}8)Jg92HN-#V&=K9ok6O@dmc2}M(=a@EOEw{ zqSPdMTRhglNu(qJP_23fWrh57;ML!wnOka(wIh^PV1)r=sB#CRFHZo*B-&^h80XLe zCw;pJUy)$JiEZG`y z)x{eWU)LW9vrk{ofS1;F*^ri6Rz0Kg2(51dF}qysxAAG=rcV(E zCtrTxjs$$D1??tI2SWC>7|l?V;E|n{t@rHKE(<;iyMA6Co3{*4#Qb(zLt{5D4T-jc zQQk+=#RRo)2>*lD&v-CxJpAs~4GPQal-`^ETiSO!Aai_=G)YWhK>o+sAQd{MsQbq~ zXJYMrlBi$nm%Pi0+D!xEILg2Tqu_zW1)?Ho9(ix*&SI_pd(@DSP1N@1(}+Yn8eY=g zeoNwiIh#Iv13aZKm8v0BK3j(OmpE!<+T{&+3aum8*4R?HD=S|=d*G9#44HK`6mCx% zqbYN~+Y>j0yA_Fbk99eBhS3;dgP{(ZD|OJst+^nFiOW)%ZuM-ukyLRc?O)I}H^P|b z-*Bc?V$1|s)M6P2AU@Ic>X7Z1RhI(usmH}}>q$X}bZ^+8a#n?64c6F4)4bAz?yGAw z314|GMyR&vvnqR3(GAMpa!fR0_eIYvtSeh!(cJ84Uevu#k3Kfk!uC`yGf1H{8LpwS zOB{p-fMxkcIgljM`7CQTH+MSZfc#qKDI;^%iWr2WRmAU4pfUA;lv0n87f!VD9p z9w`)Tm(q7dH?9^NpFu^Ki}z&$jDw32s-_oduCN_F0-HG`g|P(Z>|VE?I54N6=;Kl< z0!Ck%d841f&3sXx2^B4%sZn+%P%JgT#-_1C6@Ycf@WtVp3EnsVRc7DprHg1RStbEz z6XBSuRiTX>mkue$*_9^L*^e^5P_p^ci4|JmhEeWBGach(_KxB#CL=TUfhob4z$cfb zc4BXIo~%91Led}H%}Yy5xk2o)GEThi)?Fp~ z>n@ZZ>K98aVSC#D3MXPWHN!v6!8P=`Mz7duF_vHysq5MGNFQ(YP+18SXeXqaqdEPEQPp59>uQxL7tulT; z+pzgWm0P*XL5|PLaO7C9aO}?tY<9EoYmtYVd)5(D*TQ689H99O)i=R|ps9Q9X?TR) zglmJ|E`!<<$kgg|f&ofC&530niiincaN%YeB-DOZ!C=Q@;OVQ@Fj|4zo*6uu4lQ^F6D&}k@n z@&~%XQDMQ41HW6(S79j~u59=Zc<;dJuG7D6NBkE4CDFV64QIy6Oi$Nd)%#xlk6UBj z(=ZD`igU8l%0njTos!2Tj*qSu+ub>4UbFi7_*{FBNz8T1C|^HXAMo-9hLd~cMe{3e z3R^Fi0&TVOD$A?hPxBpJC&Uza20_C)2NlO+I3O32>FFM?408I z4PRy%n+FcYUV7V0d;nhQbvdnL7`p$QLnm{C-AR1xZ2LDcH;4WzrgAxi2~$EV!q<>o*A+NRS$wBR zgZ_8c$%_7E2L|XN_DSU@IxkijLV>t@xSnW%`@*j4M$oD)g(e{NciSRkKESI++r8}A zSb|cIXGdqqZc{_DBoQl=E_XQTb-QWORQ&=(V-Pu1cjXa&3NK}*LVfyFzOH&6kC83I z$XubSHQG4{#jhN%ByUe!n65I#rU%XAoI9R+_M4}=NP~=MVI-p+?F7dU_!~J~ws_=x1 zJH)u$7#Sp!;ToF~syVJ)%aWy=^A&`VzrSbev z&N*wXSPHsF=30Ep-7Fp@7+B#K_F;a=+h4S*a6r)j46WJQkh4}`84U`qHcl-9b_X!_ zvdhgDFxV$BO6`s3)=0Iq%ubh5qp-ZsVkIOSY%TVPeJn;{1Tbp4Z)h%kPsUj>3XOp( zd{gJdrWPmYD-F(18e9KUX%IRFUFJC_3ZkUwDGO>SD98b)-gw_21;qOvP6nB{w`^Oh 
z#^WL@uRA#_2y|45D);OHDG4(-y}Cy7O1Ok%n{Nu-7hTf?Y?= z`(;MX;>{`BoOkE+ivc4Tdc3nM5piGG>umox#z%gNsAz!-6ZxVk1{;6BvUtVefzgTK z5Z>~O`i8b9J?qRzGz$%JAFfumWo83ZeZs1T#%BRiLd2m~;BHF#M}w3XR#!Ekh(Z*) z9m!6Wp*KC}t3~|fT9(k%nlqT|g!R++mRXIC-ez74ZsOZ}~5l-^3u zuXM0vK2R9;MurU$la-OKwKeV`I9bf{6g62C>|}lE>YlHnhag4C(`&1NVQaSzy7av& zH&=?8arnp7Xs7q-@|frgUu%0jAmDm--1I`JwT`JB-gRpW?nX?e_@ShSbQ#>K1#hdN z68d%8C~t`M8tm~}2HHH*{wgK$mE?!bP4Bo21V9jEXz{UzkIghFs>?@~AGy8%`EUB1vZ6MjC%V>0IQRlapY`H>8QOI(+Jv<9eC+K!uF6681^xbt@p zwFe-vN9nxj>qH&wDxW0IS5T4u9xq$wjeVsoILoXTiirz1Xvr=t{KCE$nZc3=<(~38`wE zF3q@<(PK`*8}}FIUkNwA>3#X%*_iBX#ExWWQP9acHag??S=8>6e`psz{ct3pBFtBPE^XB?9F^FUN1lJITvUhAsjqZ%STIC`9FrVA`BnO(@en3z4 zKb6XG21*%N;r`eTTyC}$m-t9BXaPO^USyt~BRWOP2~rcu-rC*vf&vm}Hgw-OP6hXs zphfY69yA$sp-0wc^zM_@6n}pCwWYp>ua_K>k{dbUdp6Hu=tL{#+*6%>ZeLByVk>`| zyHhcmcM?RDf3*7D)~S@ME>kM|>Pye9G1hXBHz*Aenc>m45sy{0AD?C+Q#`kh2sLEi z2z=sLy2a0!b6lR9DKf<}N-wx9rP3Jm5{EuZRu`uww%7bLnqd?dIg9 z@APA7T5P(_D>74xWmM6%B$Y=$t2-$7j>9vzT6`_qcz5U&Q}6%dt*$xj>FYms%cj)= z`~a*4*ZnkO1%CN1YiG{>OQf+=m13_Z&Wi>5=Oey0Pq>g~irlD&tWS{}P2Hd}SK!(M zt|fYQBKU5jpO!weibE%?ypTcOzbRE2GR@R-=LC}33&1!~bX&0&WoeBq>7beNW7B#$_)@k(0?9<-&I81*jg8qTycu#FBAzET+8 z0o4H5cP|H6sOSroSD8@oZn>>+lJ%qu$AlW=z zLbOXawkbb*M=w*b-6BwuexN~pm003h3{A=bU!qnt6|!30(8M4SWfI5925g0oGChzy zE+nXU1mswD9P`@U0_ve|!+gv#m!<;Srhf^nih3 z1$vo0m>&pH%6X643gnq)SN+KLu;huqtx#StfA_EEpP*F0-@SufcEo)xBYh&4mKGX(3Kv*^t^aWs+PA z$iBGF@2jzEbqk1!-}%U-7QGqLmQf`^Zq|w9i^3;q_0nFP3H$|CPqAm18d>(x=urjdZ8T|b1J{qv`F;omZqF=C`$q4nI_2X8e-P3U$z(&e^r z|JUGmxz4%Hh3_J1^(ex+R7Ked)?Z5JA4H3;RXt%So{Nl5)~ZWgf?aSai!qd~Z(}yF zEhfG0eJE$?h%gm1*1wl9w}nM&<(0r=wcT7IOV5q^diQ&szeshI8;H9nAw-L#Z>g6z zVlnR_N$7DWRBSEZ9K|J^y=8(qDamR7ci3UeT)Ozd3oeho5P_$TQoN_hM|(`H4Keul z#rMHZ)F|1|?v@jc*YZ4R3#)0Fj&8LFa8>W147N6k5C&TzX=t@OMRs688uSi)gj81` z{VU)1K{lZq_1t?zl4Trhl9F`_}H>_2$8q+Z1rwzXgH zfNO0kq@2^=cgZK}*MXx4cG(`T>#n2xkLC8(TF2!4ZthxmK6{;YxPAIei~{YmW#GJq zUp>%hG$ay)j&S4RU+>Ko%~#2lWX)R-JO3_$OUkRvZ#or=F5CE)z3Z^sK|0##+<*S@ 
zg)?vons6mzGit!t6q_6l19;2F?W`dIn8_3$D6)+oq-8hadTAZ_-5AcU+@B_^!^2;> z0a=4PrwCsq$Dz*3;L%_K1lhMy${tR>Q=3YgW8#ig(oFt z<#qU}bj|qgrinmMogZKcB(R!&b=_?E_D{E3Yz=GCZ~S5`x6FjQ<%kjtcUw&YKh0OK zx-py}G;*v_hEtdSQLAkb8pH!cOjv_=N+I?Q@^=}OggBgPYnva%VXVRl{M!Bz#`Xh@rC9-biJ~nMDdSgz%GR%pWFR-B zzrXr+cu_5;)F<5SdKK7o>+!X-gx;^*)yD}5@X+x<9C=&eIPCKL_Lo0@dXc?rD*k%~ z0WMceg+qA2!hO`Lmg$3Knwrh-OtNr z;p8qGDRxXZn`Y|=Jg-Wg83QzyAIvy(3ei_Z1f9 zXFPp#KDIHhmqu8|;|loPF|}D&(zBro+EH&k#Hsyn8!m-ppKiV}7B}IwX0&h0+TaXs z48#=ivw#d8WKF^{qj7Y=?1-HjvA{GL*Bc)GMQBSd(xu|vrxJWipwP>XhNaObx|kqO zq`%6ia9APtEwQ>Ls&$9yyfCMq**s**^JN(OX2ldPyI<^21@1&UOfwbnPE2%f6Gs@C$TSxiEnAEe|dbFchj>H&>Uge$1imKg%f?hJwFn zb4OuR<_>`6D>^33Rhk{1Sg%X=FL(Do6)`(uXYCy4%gDg0O0H5|-uwEdmc_U{d^uQw zb$RGgql{{lc*kZs8ohUZvhwte2CV{WjSNy`c*zhXp{aEQB$B{T=@h(^a6P9H^$f{! zZt->vWfMGsT;j}{O)<)Bo*_jdHBBuVr->rmor8YniY6Q?g{4?P8Zkln;tpi__NFjI zD3;sFp>B#@^+c(jh45(mPaVx^$BZ{5J_4IqcWWqqo_FvJ9%-NS75xvmI0>kKIbQg7 zu+GDw?fAlaukB+VH3QF23fd#!`@!eJVudpQ{n6!PCMT;SPDt*KYZY}ZsnF0El7NKV z@t`yQ<|WHvKL^!6Xj>l2Iwuc&QuHi{=i1F{{h9F#*riQ}K?}5)K-!(n9xlI)&5$v~ zC3ec3RG>=t!@8a|)aSa&D)qT;p|YQ9vt_(zvAc6wYv*Hi!LGAAeOBQLa`ks0Kk`!d zQR`9^LtAM{z|aYT+~^{uNek?6Tl82(P&rR%`P?BK!wT9qJBl zZ;aM&l1uVyiXT=|sLQJJg}QH7LQ+`ETWay`sB{?dR?Qrs4yF!x@S8%07P%&l&@B_;`3=ORf{mh8*SyArZwDxoYh zx5OmNBx{x#*`~5gh_Md_V~kxw){gHR$(J@qX57W&ZHZd33~^LN+Dy@FW5ny|hOgp-^u0mFF}-1Qu}=QI zUW2@(Q~EJ|LAg4^6o)og@}`I$^mQF?`o;&Hah_+9xAH?Tw~w)5?rEr9aki@Jar+CJ z=dMdWV73>`u70W%+$ovC!58;*_=E<#rnSzHC%g?7YrzJdvI6uvBkIzMG=uBbHCXTbX$P_efFk%ID;rzis<}7Mlq9MxpgyF=sTN~ zUPh;PDV`sBR8b;d;r#GXa~86SU>0F~DNK94OuC*03-DGx6aD2ltC-*^q{gDnPlEwZ zkEnKhs2?D30?OZPh|>G8DUzWbVq@E`qm#TNWW~(#X088|CWpWW1G$JTM&i9b;SMZY zIZ-9Y*}z}{L!3tJfP8q?S~drX@4CXMZ|ANOrOL=v{f=-jZ|y)mkYc|feiDtY+cl-F zlW!!=FqhU{L{vsb;TStt;?BIp4;Nk6LIbFXL}r?QB&M$g9~`?>g>#hV#Sy#s9I7U~ z9DO8sChcVTF^${BOu`&!8{~1WJfl5x)pOuXgbaJvws)b%$oOLN3pur|{<9Oob-O!} z3n#L&a8R23->IuFaKt5if-w`m6O4}dEAGmN2dQ28mwFuAM;rI*Q6 z!87S~1K3-%m&(A4$qI+>k}(M0p*=C8@lEgQaW|aaN71^d#M>|#eBpzQ|GiEhbTtdA 
zMZ3!13_b3cQ*-|8xuJQp`CKTD9+fd+>{xy`^Pr=3=MSr!_SA1I`KXKAU~NKmPwG_| z*ezUty315oTg=G_&1mM)GwO*y$}0dN+eSrnS{v4OA{1(knvvcL4Eo{3B&;tZwB*E} zX0;nKno~BWt1KlX^KrVp@0XGfv^JePVVaxnAqwepk@V{AT$+S!Pi&*BJ#3G>X>9*l zw}nx0;)^*D)Gc7{=44NW;N5@=?)#_q3-5+N-Azd8@Ys^LlG?t6&4r_&(P&Z7ZNzj` zTNmBH6}CV!9OOhFJ?ES((;e2;*c4AsviJ*~S^m}X8kc8P zhkH7Q%Ldem(sKva90z?uKG!6*#g$&CisUq}Ev{^X|8DTO|<94Zd&zAZPGJ z0u8$u?Edb>idZD>N(CelPyN>EkXqSr>|d5)Aml2t?pR!3l|4~t0H`LFB45t~5Hl5| z(vecT9{FUbbOnfF$szV6>FMjp5xOl6pdkD3%8BJG&a-j~C`z9r-{u^dTa91vWr=x9 zs&g!`QdXo*TNW*1fYnzr^OIy~o2wJrQUFRjwoG-}_3&J-+ewACJi0|gvUjfBHg`2A zlfGh|UT0bY^Z(fy0KZN}=bH*2-f;+%_!0kQXi0q=?6(fByERo0R5xxR)|^~D!ns`z z{K0o5P~lF)k}H!NMd%sCL-*H~uw+vpltAZ14c8-j&_l_S0 zM#dUa=WjBW(2pmHdqOu82S2p=?OMEYj@7wiqVX>Xd5(UYoxF5cb3U$AHOb3^6?X(N z8LgDa@BO3WGH&0x=56w5B~tbUFYD0KTdI@pbz4Yw;4wSwE%(Vi6gJi5?c;pHsNWNL z9PrDwYoTCTpRe#Tj{ybJ9Q4|+c%_ZMf+Y@|9K9HJ zR&TWb@_{-^9g(OZQ(_^Ru!mN`_|RpWV!wRfeD}<&o;t7>K1Tzab(7kxv(|4TOPeRi zRjUr#UFLBHG2UK?^_rQL?{}F%-#v&Mb&D@pigisjEjJA$j~;)eK{QrW=saL*Ozm{A zpdc}uJjuepv@tY@?UTNQYw@;I1C;w7ngub1nN<-Aw@{Fy3`wMH)?<-?DlKXaHGoC|a2Kkhmdd{c}LxnULpReVeLO z-q?8H{eT@C4e7cM9@RY-5TYQA0e!sYh**aLVQE&imhJsN_X9aC4r}3t##w}Jk6Zq3lPFl@E^lQ9iFt9ul0d^hbx=?lGv>?nrSI#Xw&aow8d{A51eXM+t% zEJruE4$ZyLQT^dTNFj9Fo7OPKr1L?YY1cY}alR)sJb+P_HIQ($; z4vL@5W$VeMS+>M z;aXGUD>+s3(+ZaNSyzVp=jwy_SVe%9!jBXiF+)m8>fG)^?P<}FSppJMGJ}dZIVd}E zpol7YIhO&Z^D>(nLHtQD0n)#OUxpJ~X1<70^NU=Sz22(5p*u)WWP$j(DgR|GIeTOjIoxm@c6_hRY1^KV9X-WHxR9jfEYEVW#)|Mmz!&0sTSio4Qqfu7Fb z8sYxhBV=%<2E2YMK$A(n6&uSWm~}-5xAq+Rv{73Q8A2^m*FoIwuSA>A#qHUQ_4;l_ z0#Qt5rAlPASr;M>`Jy1DuDNwsc636gG2zTpk^6*^fq`iriRh({_{y@{UbG0mk#vzy z!Tr3ZZR^Ov$$(YnlR`J;ysOz~S)Fj5*maqh^Og3~H1X$;J}_iE+FS;<6TR4@rg1(y zRx$N5m6MpO3OB}@EXgz!Cp!dd9Y?o+3)@t479nyRJ1YdO;;F!cU zNv^`1JjFy622L4`z?2B>2#srDSn-}561oF&KL1^slsJb_sHCReAnjFrmH-yXWt-nc ztz)=0#f~gkcg6?yKr@EgvKH8ua>pGm*x}Tr_y>@J=Hr7CUSwUgP1jO#ht_Ob(RAl_ ze3xF{To{e2Xz~NpmzzoFujj~Jr_&>VZf{A)4y@UhUR43LP87zuL}NKVszEu^o!21` zPoM=+beT{Ex|KPxdT#5V!`8>FtwYy7QyHut$Qj1Ht)BK11$1ts$cC?I!I* 
zwTw3d0_#88IFnj@jsX58{l&I@wb3#SJb;Tc72Ds_f=t0X+Jre=&q6uOMFix59o1|}lS5K7u)ZrgZ zTql6xRkWv()J0t}k8SuYdS!Me9a8nvS1|VNS*3)7S)9kg$b3PIrm!;IJztlApls|nKQkOue0 z4ExOJ>A1c~X-?RqrheHn-bla_V}W8S7uq}8;TL*Xcc6~QqOVH!N}_F(IvxJBpafa+ za1++-wx-Zm7bFj8JFi{yVG7_2K%*XuY}qZ7j#2BcYw-}t9ey(tMe{Kcp>*67uYR$kUjMEh-FUB{;`#y72`C%N)vY4Gh^^e>-;tMOx++24vR%@Bm@Qc;BY zbeh5g>Y}g*XdoCQc;xK$>=#q0LIJpyR+50BrUcu;(8I?@=QYvrH*7jCUv4lNUP|P6 zK*tY@<%Z>QxP*RDCpMheRTXc2iv)ai{+O7sj`5NWmiM8l)b(Zc?xW&4c&YDAX?%eE z^mnzM-TcM^_);CLK`sMQUn*j%HDoyXcyWg>{WBJx15f62@a}6194JNsPG31^AzDW1 zuUt9ul5aIUy;%qS--aEsUZoFHL%YKcSX^eZ8cFgdqW|p^m3_}cRt8yv<+dU-5Pf#9 ze%d7bkgW81Blr;99ki55ys2n~rc&_l=U;FKgfU>Z^&zj}>aeJA=96XcgCIj_j{339 z2^81Cbq@vI0UEvL>;zf1U7&j2Jms4}TSS2=3KLe4F_=zZq@{<20_3izx#|yVngiOQ zA7^-a!a3@LbFY8k*8U5k-!6pmtkJ*l;SIHaYtEAoOa2PUQIF1=j*M#a)O_yqUKVsh z?Lo|Y$PLJT{FWAM+UlPjn%e36JFPLT-G?h`f)Bkb-L(y@Z9 zBYQw;9(n|}I*JR-mJ_(F@(RO!a_HgD@P%=x`(#39K0o7!S)Ce%vVXp6hepP49UaY=`~qF!8zwS*TGOy3*F|O)M|ZDA|u+{X+lspF7bDvH4glsddul4Am3 zJQL=aO(EeR`3=VvVSjqXMQRsRw-r$NPOZs!_>i!V3TJpQI&vRzx7$b}-{ED+qWT`v z{We>9%&)Zdm9N0W=qMxgS#i-rp@!#i-JQ#WIR@~z!l+H+a?35{=w>weGYHeY#db|y zb6VT!UnO$rVn8!g++Va}(RPJwPo=!iKYP*N66EKhM(vvXY}`3GRm>h3n{1>ii|V90 zGw9k1-&s-lKhIr+M&8C8qymFWh58nI(7Nk zE)M4dXN^Y1D^kg-7zk;@49{<S%cLB?d14hi~Yk(G<-JEzh~Fj^&ol@<)*PVYc>b;bOmQuuGk3A zo9(R{b!4LKdBgi$9or*fM*ba3jI%VxD80#mVYFq0wR7ZS=!uI`EHhU|5sAx0E(T-N ztt@vd*r>}olE)t)^Ev4fTAyo+&%(K93*Vest|N>1DKNz5=O<%Lmb!Kbto)6HRQaGy zTn1alrw9~h%Fi6>@1nC65OuB}UoaQN=6uJ0*!?-ZI`k(M8hxqXkIa3{Jts}4oXk=1 z5>1@)f5ryi|Kaujs90BKlR=QDuF92kdF2cp6dR?b|C%AcTJl?W)vm=YvbXt>Id@WI zZ?4ztX&8(V_?i-!Lm^OT?f*5&@$TQUQ!BK51+Fkxe(EbZ0Q&icB|M2^#%?zJB4pSP z2n^EFZb9AuV8z=|F#gXilxf;fQ;_b=lIoVPtuCFH+iB4(^XbB*v`on5e4T%Vti-){ z#r8G=ksKYcqdlwANkaP;5^hr}D07w4q_poT@IC$+`kF)mYjdw%zua={%bnQx3E+cb zsZbNpI04+c7Mq{HH63^(vl;SUJ%ap8dc2MRQE@L{L){1mE#Pal;HMQny90S(WZ1jw zSG{H2x!7=7X#O=!KCSdwfVttXpO0Gf>a4wczHmZt>vHt1i(XqU983T9bS}!Ff220I zu>qTw?|=DA*kPBAqtctEFsb1omY~_Rh74n+XjR#@$YX#9V^d{JKuj<51 zTdJH=f!l0RX^=^c8=FBcdxP1EiT;#F6)cZJNtqs 
z*BHgnHYR8kY!o%qq{|wRqj*iOL}Yop>flX}MSzNNO#Iu%<8DoR(N%kXN6Ue@Uc>{mpUkoO6IJMZSr?e|( zQdw+DMU-FJOo=7n>D8C?dG9B=|R;j1CKLf5}oPcZYxwAB8{Bcw0SP3lK+FTFz zr;;FGQ^;}W)~j1D6c`1+a5+E23mdW*I5vxY^ZE?D$NL*^(pwdOM8C}AVSG|QPVtbA z({GLBIa?A3*yp`~#pXmK(qruFJIsgmqFD*Pdobj0md9R~%o5az9ixHSlebpWH`wrO z#?xb;Limjw<`S#pTfx~2+3T6H9u;hvM0d5@+H&~qw>Q%zzNJIN+I`C3SMs(6@YBO+ z_XKPOTHbT3ZAu<1Y4!4@IXu6?-4PrbO5L;ujB`yAdQFhm(JQD~0z8 zPjxq*L&)qUSxPTAvIY_a-@Nzq0S-%2f==FcQi>2+g?{E9p>VS%OXToFGpc)5%L;eC z``~{fcPQh4t=fM2;FaJNv>RKHf0~p!-nlnfld9+1@Kk=BQ; zFGTG3%m5t>oGQ>#?X3Pk{}{Kvbs4<;2H}6KDE)A;BDpz%0K0Be<}~pAo6mzq6M(b~ zRp{-n4x*GV=|?I^Gv#V852MLzMswItOr0$6XV+d=JUXDR-AilJIZ#~{w_UfxC5(+x zQ@_7BGdhgUbnoY-d$StuT{h)^)p_&hulJ83`XVKES8v2d3?_ba?@a^^z^@b6>F|+t zNTA#m4s<}4HV#YniZ@n-m}{;vfWOWy5J-($5D%TA?{s;9#XLVkbWUqSnFx9hrd=b@P_+Q&~#Ds`V{kUztAiF}$=4t$KexU;>VyliYWoNw57)1HvZ z(Yg>h7{qwa|A9iny5=|zgn#DP%*(<&_D>|h>19Y^q7`nmW;71g>OT8}@CX@7tj zCm~WKqjKa*)}b%?AN1r=Uze`+`40S=e97M7%bJc&S5UO9K@Nve7NH;FPbFXA;)U&1 z51}glC)U^)(CUleVG!kLFX>_TX!jM0WdyCgB$O*{Z74`;F7*H>rDzexSL!|bw_x^K zY@~Skw=AoJPO06Mp2OHTPz~B+dlJcL=hM2uqoqKk)C;J4LX*aJc1J9aiVY1cJAFcS z-;~A<^1R^l>HvT;MvCkb*TyAAb|Oy6jnT=1*U{^zXjZV}FXi~KB0$nP5B>zms}Nu~ zUJR*hq1IrYi99+-ua`_)A74FkZgNH!1uD!vcux$RU^6(1nas9Pk(ZKwj5ie#3U9dk zPRdN{DhDD^aiLucM`(Oq8w##QEZ`D5ayP|B<`pme(&k6-0NfdQ4#$njWeyODX0onF zjPBA$^{%^K_*)z|`YHVj-_sj_roQFzM70NjKG}y1s(Dvk1zCxKlRa|OOKEU|%;NcU z*xu*eZ9CuXCe@^YHlaiajFb_V>>DMe|7|= z6~Y+6^BbxdN1ln8ZQDjziWHKs*X&xehGqgpSayFC^w$#{sw>Wee9#~bG?zM6_%$ko z_$25I#i0*&num`?za}^0obCmqHp%bJhOcXW)MycWgg@XNk9pWRUGk6DfW4O+eb?>S z(V@Tz>s;Df_$y=aU+-mIxL6R)fO#w#su% zUaA!e#=>A(fG&LeZ`V(+548F?!*wB<~aI(3f8xTQ9 zp%HK@U&`;|#YqBf@Os8`>hms*>cMGR`9JmiWXXxtBjO&){sP39rm$D_An86=TbyNd zD?Oy=jRz|>S?J&K5iUB+##vMD7zhXC3N6;=ekJF zHc$rOPV7k!FfoXX>5S3(M`t!x;-^OrLahZTw(xkD7DzTtqJX!aXMURVEpXdu&>T0pImPlE>Bg0U{h7@!B&Fh4qx?OZYS#8nu9UFnYY8*vp=0(y_N^ zJt^L7D(~f+(Z$%Hb9GTOV07^-+T?G&;#!mZSwU|!5o}ie-c_}%LA?WNDXT~|ic(in zbP05`4lte(%8B;Ap+@Y>ZBW&O_c8qORQ9@qX1nhJt?f7AfTXm9=X|F!`rG~Q!F89= 
zq{r14@q`zWGk_?m2L$$P?L=pb7Tupcu1o)Y=uzbHm*QGUN>ft>JhpyJr4n z3mm(~8TXscK4Y{s8m2cr-ldk*6DGqPnkm%Y+d30=qUZ3ztCOY5iH?!&K*8z&;JA63 zhLBx@`OAlT!gS23*&a^^JA78hOO*wl-ZyGHpc-Jf=YuIxaGS=j6zG4!<;)>XCJ-$?Tg67~y@R4SXWr8NoJ2o3NhcGDby{Jc z*3^1XmSqiPxQK3wqM&>o^n%Z+2nYD5w~=?m$L?;SYDh9E1c6}5VCg_oskgzdw5W>e zwjzJvp~2adU2ZVZ99-A%WEqGxC4ORKnCpU4VAknN23j28qmU`1V;zcENRkZmHfMV{ zQ(5a@nPkw_!ly^;KH%V40p4<}W9EAHDf&WqHi)?}0Omm_wd zpGoioGSo&-Zb5cq!2ZklgOE4^&ti`egI8m}1m`W&L7(N_aA?NBvE)L{UQ%m~ay+@b zU^#5&ow?Q7z15+n|I@N~F_rDTJ-?$QFw-3>z6x++$TJE=2HCc-+1Td9NqpF0SGUeP zwlmhY)(si45%Z+JRdS4JUqElY{AUSh`B~m}4Am|6Z@uO3z2S$23`wLs#AZNm<~(B0 z(7`W{!VAWtu4eV0Sf1bzCV)p=N4{!e$>Ym74_C$Rcjm-(U^fY^?T%+$w*5}X?)Sc7 z;?7I=BPh}7e3n47Y%FdI05_p10jwO|9l9H3i&Ie)Alh#FM|a!kKCLkV+~RxiAv|GC zZIE@@4y7n%NhI=UW_j2h5S|$FC<_)g2v3MzGHpa2vmpFeJ*Pj# z3U9QIpXFm&a#W`;HIwv0>= z0ih)*4Nn1q7VnPxGO&(uh!+#AkBB}>&~dPmDVN^Y=sYHCEY4b1&bI=o)u`+agdFVI z2c6(CmnBk#HoUX0Y>pormqDM6q+Z0B4HoQG_Ry$UNnRoK2%iMgGA77Whx=94!3QO) ztAT&nH~{k47#nHfs_D>dIgogq_4W2~TuyT?vIoOfuuib(#5;*OB~yb`d)Kua~s_Fka+VyiVvw@4w5sn^|r3t zsU_LJkrqQxyhvDz%`Z8phptmn!&TSfp3pe4AZTpNJf(aj>6HXx?VYEo!NP9)e7kT(3|NP0o6Hs7%-F3?7VUu)5KCW=12K60r zf_R_)10a!ul&>7Vk_K(H*&WU3B&m=Zj`zHY4!)>kP zqPog6NBLpY2IXSMM7N+Zo zmaTJcI9tI*{T`ExF6zpOJs}lrgLXM;54YYmm9AW@&snpgbNEQSr5jdaF1H5oBSbVS z&VK({=b8;L9wAi9Vk6|n-lYuKeP}mn*VvgD1~CbDh+C$Gty#20Zd|p3(O#~2MixdC zqwi}z__`&Qp$T_r3eb)V%@@j$`y5C+%k@oKeQ$V;AXNsi)3ptCv(Wijaj(;hIkQj; z2UKFa(OVELeM0Tj9zgI_h-+aNgXs!w`0e}Vn|NwSh<$4x!926}c?F!>3KzRK@zi<9 zO0gl7(tL(Mr=mhaGsYn#e?cGRgQ;{V6e3CQ>$>Vrg-rT5sXliu_fX>i*|LTiGV$Az z3^9|2REmaNc{)UcGlYeq`S}P>Y|R==z@&eh+JU@>RI|}tB=435ltxpX_gy2^ANuJ% zc`rISzvb$^yMK0WR@Kc1T2fQ;4>tPLh~Ss9qXRDBeXE^^ zypO;-@pg#a`T^r-61nU89UgkqRAHI)RY!n`EG#o33cXzgNa(2Z#gxDFE&l&pK%IeL8J;PvxjCs>vHV+AO%j-*S75-b z_BKE((O{UPrOGQZi;D-KQQKVO*Ee6T%u(y!N)>XIoL1q{#oQNFPaWB}5c}mOi;$K) z6d58jk|&dXlb!xB5J4NLu(F=^h$D({i?kSX>Uc*h2v2Qru>%qlmpzZmNXSSK3pK1A7u*bvAwI%ps{pji)+O*-7m-1y? 
z*jPFi!50k`b>*3Ft2`sk3saB<6z{^vXw`mlnn~|(-C^T?oqooEBkb?iu-Y+m026Se zyoT{=K=4&j*L6o`=#fxltCZCGwd;n_x|br7{Nl_K*OU2vOrV%3D3ov`b?)r;RW7#nb;4^(r? zMZaNJs-Ll$k4xF1z{N2n*Qi2r>kgx8D=$sBMZYAjbXRe0EWDr`6>$>2U3TEuXjdE& zvAyXiRPIoHW#0W+eTcAJrHo zkgLM#t4jk>oBnyTGtf6#&mt82PJh)E$%WD`a^NXGY z&tp^0EE)J9MqJFrRB{`aD`KN`8u`2nH%}I3V z)uHxtWie(#yP$Q8n=X&+!pP%B9iVVDGEq7IFmv5AO5r89X@+*VE-hE=;2!u4&tZ!K z>SZ?*+ERX2tk71Oc!)Xrx;Df4xelGF1$m-l%8RyQ0Q_@8enK8$S$aCXB(63#QbE?e zxhT89TVoIqVW9rZ-&+xo9$~&hvx>vP_quI>W07rY*3Griz|2m8BV$#U{SZWfgf3|` z7vC5lj82@`Nn61aL^N0b@ZYh`+@{8il=Hi1mXg&} z2K*OWvg2?ZEi0wNhaGpdcl4$-P;^pgrR8F<{b7U;TpYH35^7tFT~Cm?mW|@||881v zFxPHjVpnbMx;E}J;>2A|&wfAik7{Pd#OIA);{;}W!vbwi`V-H?d4K+z_(_38Q1u?8 zmdMF_pGM!GJkZcmU#CQbE8f|Xr!0t1v#vUJzoVV^G5B1ZV~0@=KzZF5%KlrXiY?Uo zaCl59v2_G$w%a-yxKvU8Hn<{>^`U1h-m)0zbDTN{dg zk?bd6ZapqLvx71`;vN0A9_uUXOAI?Zr)h-3i6=ag&p6YHKcu|H`wkriTLE z_$7y<8$Pzk$i$2NIXkXD0gE{O9j>6XLW=xW@xRm)m7kwCAM_quBvz!w1s6$>=MZJr`iV^7^?a()0oST2%IYwN3?yxdp)8)FP9%_Pr+4bTN}!W!7C_Pg8r z-t*dA*SQQ-qJ!77<{@cK*CDT*0p_HIl(38xZq%8)> zb|5c-R$*%%*XuWy(fuaB;D}y}y>8Z@D0MJ~_w~@kq8D>owplY33xH8YQ%)s&o@^~v z1S3e30ZbdCDb^Kk0`}$=`6>XoacV+4h_3@J-Y+;J^B-o>)~mOoN-xgvJhy-ze9)W!|uo9}VXU zjB%7vsFKD?5w9qq`RiR;wRJGR$9(FDhX%!ll#G3;)L*UJO@QMOZCn|=;P%bYf5v9d zk(-p(cV*JPLHVvG0e^hv!-oWHnhaD<Z+x^HIfowkt9H?Y-mT9+V%x8)S zP{Nwlp(*M zo6EnLU7#7(n!}?uf!P{jtc?Qu*=irKlQ`44ceyBxn-qtK+x-&2!R^r8mki@?vI13X za=y-=78Y{LJ}b9Z>{w?U`4#di+tTIbtsr0?rHro`85fG9tu1%l`-aPR<)h*hDJt

    l7=jn=1cm0pytV)O6rpVjg4B25}Sz@3x*j# zpj9<}aOK$h$J#TVvCn&n~QaY?$8&P^m&q5ECozt$N``OMM5MYx` zw?mE%4R_B36THTIC!?qCA~dKQ0=|Y0ltaJ4YHU;1s5 zI?RC~bX9Ck_-FY212gu49H1IrDC~#Om09YjSq3m$)gg{p!W-w^Wl-Us0~d zRA>f_ls`qfv`zuXKW{qN6#vp({r9)QaOBc$9p%PKfb{u)rjDr+Jh#CdnxZr-wjAW| z*J8fZmNM%l@-TKLk_cBLPvs-knw0$frirP0RSKu!8mtictL?`DT04Uby?NB3h=AJg zq%82cg>G9DW57%-2<6òkc3WzG|_Ni-_XDrc~nV@i1lRLJ_V%jvHg>5qp%QuU^ zuB4W7HQxla{mZjlq^qRW;e5xD%Y*EFfCZPPZudjh^=MG~WM3clGz8=LR)Dx|Nn7Bg zwbni8E5;W(^q5xK?3g%J1fJtoph|TRWexWlSPrUW<%3*grYOX}1O04*%wKvUTJLIK zMkYF-?1p7bgA^{iXbiMp$%qU8Z7@Y_k2lM;Ki&KD?F02qEMc?-JXz!?-m1ujPvzQ} z06vpV$4mD>#c`h6a^4%&>P5`1#s(>-AW$Ua^lo8}HfuFfymxU|EDBS4Su?0<6LP7) zORG`YOxEmVJ!)h+f^KJEm;>L-bj+WtnF4zb8l@vV#@NL>sThdo{P(3eYh z$uW9D^c4KZXxr8X`{le>fkQ%JlsYo-!(E~i^`9^{E8CF+qLjfaVneZoh9-rd^C zmFn2Q-0NYYyP+~bTRlnMK>Bq;cA87V&)7BY;~YaJO0D}R;A_}*&si6ew0VkU#zwJVXN{#UcH0S~x^ujbYor9?YDH0E~B zR9-PzQZZN@DZKg`pf~Rt!9ksdM)3BTbzTD5Pofu~L~9r6z4e$p^Gf8>xg%YkvqZu( z*>ZRRB>DrkWF5dgcV2*12_#*CV_{4@vjg(S*U~v~Zlf@YyDc{-MRumxjm=jTWw8L) z#R<}jPY*~K%jXTy#Q%y2QoCq9A*oMh8?dL+4=s%IvX!gj=hi-}iI|%H@h11mUG>}0 zE=i$CWkH$YZbc0@Ilbwx9-C6UpZ$9z$QaeY&LqtyPJOkh(lV8{UXLD4swkQjj!^<3MB|X1J@Vy=*`g6IMsiLrx=oG0l_zqwA6oYln4Ym$r_#KiMs!* z&UaovWHnqx19Dy;8TkzM)tPr17r$g#ntw6B`6Z2czujQe<6uMi9v1y<_)_jt7yn|U ztA5S92y`+c3)B!j6Jc&mw4h(Jm)(k`7EN{f;92^26gK$~R+ILe2@WCNqd(T`F4q&^CJ*IC?KYbrhc8z-Pit&nw zs`+ojkD(f=BUv}=$L*29?qpd9Xu$`J8I4U(|HcgVo>MKhUbTes*DA*BYNSx6=^2o% z4co@XRzFsK1?Ig_gaSfh;6yVv5K|zko!xE1f=I~A4< z@G|UozlkSKpf$UC93EY8Dwpl@JD0mJSWvi5$CJSL6EGquQGcLcW0+F<3><)V?9ztF z1nZKc>^)%~JsFs7-08FuKGdW-_BGLk$ztu3YwNH7kIG`N zMOc*~;8bvZwAWF|* zM_WN{WJ0<`hl~VLETY@oT_2GuYxQ*dDvut|Q;N*8UNXxsGVET<#*DkrVf&hN-*KKX+-nk+%zu%eu+eDBUSm`eXlyu%K^s`N-Bb%7`FNB0S-_e2iD zN8)UN?M$1eyBNvjYN_(+Z1s`r88$X62ug8`1$#wAvx~VNG}|Jl3f5(Dn3%QO-SBI2 z_(riLfQ5W`MsvA8Nf1j!v{huoU-~5jw-uxj3#ZIOv=Athd%sohYr?F_i-*8j>ni?N zB}S-wU)4`Q{8SM37nwU*`DQ^m7N$; z)A~xTOeQZ^?DAq|y35YgRM$(luO%R82$^_t!#Ny$NA=^lHyeJmhfayiI-haHY18Rm 
zS38rezg#sP_5AR(an7DBPPIGR^!jP@B)J#%H(td*7fcc)jJrkPPP%Dl8`Ak|SvIvE zaz`+C`v(i)TG%GXvsa0%61$w9D$#x8c~WiVjvPIU@61@D3*#9Ey2}t_;M0yVx?_A( z{303f9l8d_2+Vx3e_tKJ9?-EG_Bsi_%~4OtF$dds{xS?|^Lz;3y;jbSY3hZ8nHI0qKTT??v+*knBoV4oA>WU=kGY3D4zl21GxMQ zmZ%j`()54sH%_M%B@ng9GvhZgMi;aui00+sd3{fv5tmHt?pc)ssYOL|aCF?2ymNzx z1?I`JI0j)(q4^f`f6|4yC*i?>+slw)MT3I{`e1kTEjhx?oNT&G_P`u`zBf8uM?d@6 zBP9J2$g!<>@6od=g#p_+mGbWYHn40L%)#&QMoTZ%*BF0sh{f|IU=c>(s>*F_$6U%g zO@e`201;+GGaEOOFImQ7yZSs|7+Jmeb7MXL3Uy@$JXzazq*1r=)xe4Vq6+Q0h{I-U zQLBS+Qvtl}`?rsc*T)Y!5b`Yr6UVA+;{0jgS( zq=XZa1+ofZ4ET3L7YIcx2((!jq<<#sFv+JR3>hjZI{nBkxD{uA&0N%=oDH--Hl|Yk zVaMIlM*4go9gMGt4id3|{`PQkClB@!u7Rqn9SweXB(dW8v9rlzpO_c5wD5iz=AUMo zPEznc6^oF(Gwyu?b{FUHu-77NxA{r3Ks5c7O77mep&E7$a5)V+}?F1 ziEXlY+(_6A*M;#_KK>f(x4I_(2?WKYXo~QLsOF_800@{`1w@sInt$Chxql#djej#V zBkb9wCZ@*t&z*l;Luq00{z2Nj9E-MDv3KB^UuQa!Vu}r<20H|vhJgyU;B{d{%I&j< zJ+3~q@jSP$Xhl-&+F_C8R^T7bJ{$y>Mr|e4Vj`o0zeN#nHUJv9SM@v``Cf2lwHmQ+ zltkY(Enq9-P15c0h{^gv!=Avtl}f8Ez7h7wl`YzWPiz7CqVmb}?HArQE{6(AY>mnu zJ8XH_N_t-hxl_>b4ta>(es6}l=b>(bZy|6etu}o<>JYSteXeOunMHG_27 zi-yn2n%?$${%Z6b$c32%W+N4%sC=IV6%YRJnSUIhbHf4Ue974So#OMHnJq@!|0RcT zZtcb-Z#Nb}9=d#Ou0+2r>c+d;%uFq{WM_t?4W~Fz5Zn3b`k3r%5m?V$GbFImbpAlF|~v;3%(S zZ{yQhD~nE;47{{lNu>DF%IWwZz2#El-&8V>?*z-OLYvb#MG#TNcGqxl{`vIQOfQ<^ zzpc-OGAq6$C7Fd>%$bnKxA8>u(-rR(-S5DKR$GIfQr5A3K0kLtckGhliM8>n>c%aa z>HddJBgqRKjRzTwp}fa!@pVz&1@R#PCuxg!nl zg6-nm!$Rf6XIM{l&|V`E7wd5MIC8ZMZ_AnDa>e>PTTOBjdwve<>Xt4ob33H(Oy zJl|nIdfrgnHo9W^7=p3nHyU@GvmSTSI_K3Cnyq2Cg0QrBAjZg#5=r8zoYUI~hGg*f zD1WDYCev{+nBTkqnt_WE(`OJg`~;c%3p< zIfwb9>zp9pI#k)7bp5*$77TK~#UPU`T8BWtddK3xs=O7gCt(zvAAW%TbVJZ`I zU$WZ5IkKj&3-pKt7M<8esVNir4be}oEJtse3VODxq9;Arc25Ab zt&EhZ1f6_`IE}t~L-bq>oX-V)MAmd#4DDvDjfJ3ec@H{g3wkjUBqJNhZg^x+8HHK% zc#8AaGRJtyU|=R4jK^50)P3yrlbc@)OA6xRD)aj74{B;$NW)gzLJaw7zZ8#}hg*x^ zjPz=&EM~IhJBNirDx9pn;hUR#+?$|s&r|U`{_DN~2zJG>NLDk(KQHgze*c2Fu#6r1 zj&xzqfY{+_OOSj~&`?fv&+gl@!AAJs8L3fY6GXRU@#LtP%Py^dKXN>_NAs24BYNg~ zQ>oRMRikIvdPP$*eIhF4v^528Iq@K@JMS| 
zX*VZ^VHj3fWXU{y1#6~Zp(DqlRyDfDV*T6q1hk|qJ5$Q{N7q+L;V2qCFPNU7H^J2K z7SlcEJgTTZwq15dF!K1Tn5dBk+12ZlaK;}l=iWyKyTliJlCeI-NSeK=q*w25`=Gqr z6-z`760PReFP;rI%lM280`=4G!*Uw2ja+sFIyaQ|@2|QffpUKxTvue}#j{VHY#6!~ z&ppU!7qS-%r-lh@t9X+%)tiv6y~gutJJ?cEAn;yYrNId`ljyUqRh9$ueqf=oMJxI8 zQ+nNn+mcqX-$jq(HU9kEYg;b^KDrv$hVrfh>&8I~wtQDQOu=aiwEllXQlks&|LFm| z{=ddD;?n%8d~;e{!xPP}ow!qU#{u354j-eP%h5s{*hKIscSTCRG?A zl9PjmI@uRy+nV!`RM|N*^r`=1(+RK|3%{Eu#E)>hx~E&8>E*IMN3RLz=ka+1%X^cS zXp2t(k7D%)1^nxGrN8|Sj!4bf{O?7?D;!`%x zSckUNDzti`Z~S4j>_7+9gBXMs3^?|ZR2=iL3_fV94y@mwJH6^Hys>BND@8_hG#DWy z4;Gyul{~%dE=GG@RYoFvv@$jH;@^sH3*-t+claaKF>R}5ObIHP308ugWW4RqUz#?{ zE{YQ0ENG4^_21$eoRnVruCxOGY#IVxHO6c|a@byVbFS*YYA0KR-H${a4<|Do6Fv%; z(a&S~9ks>>rV@;I;+@}Vyfb%@18BZsYDn)A%P3E)0u^68_hlXZ7i^R0UYqf@URz2# zG*TJm8quOnd2WN; z={oht{Yzv^@G30#Cs0{9NuZ%fo|yl;&G*gZ;WhE~30`f8@t*GMlTT7St}kT>ed38T zOVN+ouem27%{zlrdm}HG&^53SFhL}x0uGSqDoUHeqE3DU1%YI9BmDRn1*vE}U#t3Xguxc^qE$=v>IHotAbvZ_8;uhfR5~ zC_!(+a=>*4|9f4ll0ae`6C8ySH}ST#OAF_V?FtZ~=PFz)UGnP23acD0 z3BDSGvG2UjR%ncoe`_7($t}oxVT;bPQ-qGqRE>h)qTD!7_*zlIHf`-0k!Ksnlm43Ohg?{V=iBe44$F@T!HZ zvRm6(_l4z`rqt7=YoXtjrpvgbGKy*Wg$#W&z0_tP#h#1{98~UR1i&rmL8B?^u#QP1( z3%+F^`{~JR!WAX%ayqa_*jwpgW3%L6Ikmg&w?2!$Uj+g<*{0mn-ziCJ+cq6BEsXLx zN^v?iwj`i2<1F2+WH6&H`u0+zj&v?IHmy_t($hJPr#P#uXe>O8FdcO^cot;FE8@Tr zi+3^L9_^0vi>;NLjDEwoz4Qo*vJcYE{RuMuu0*#TFD%3&xTks&5H{t&uI7YDP_}8- zr;l7;B?+HqjD2WBSv=Cua=G|l88Ln=ND=rfp=*pn7O13D2#W$%rk?x`*xi>o>5Mhn zh2VI%{szj2QM%4i@ON3d4Xo_G%ziYY%RTaV%`O=hGLF9)`~`0S^DrfmSkdemwj9mC z>nCDv6_%_kDh0o1`2CgIZ5v*+4ict89LvSCkM4B=weT|0=d0|-*o)WIU;Iu6{Kh6I zDCZU0!s5)#6KPK%W!>Dg?a0iVCir|xh=FT?26l0IsFz8rV}xJKy!_yz=g3%C6~*CJ zb*2TnB8~Nf-hQKUaSgOVlX`si>OS4PhTF@oOQs)GBu$a@qGbYArsC0*hixqMuwJF} zJ5hS18f&@!TfeLGZI8)Jq@AFe$Pj0twR)Z6_oOU4aq}D32PL?J-h=<1SM|GqrGMxap-VPQH$qpk4GAtOov9J*Vg8e>PE2d(Q78t_;<05!KLN&1OLQ@YU)x5dK4dIVKAG+f+HFoM(e+> zN$ce7kF&d9`=g!|WQ5l{K_t0nuHwrj#&4gap3hcE?aWz^=d5%A29k)?;dQ%Yw@Uu- z`#ElGt3?ajI$i-K1y44*xit_v-rsn;(qWV#Ua_+r!8iEtT8NuN?vb`l4%7wH3Kq`C 
z*Qan7_glvrmLsK$VSB&xW?46&F0prYBYz@k}%iN z_khV_^7+a$>j ztv9>d@FuoSk*jOY(usdsrFjI$0}kro4Mvh*8J3wwD5!cJ(17b7ivRVlqL?e8cD-iU z<5(nlrT_C9WjwBSo(AOa1lkIF)R60&Vjc^>0}hEzyc_J5=O!sA?DACxok#UpD~_ey zCX$fdHpp{Qy!wQBkh&^Isqn~yM?9VjH7yG~)e>NeqoPKPRTqb zlTh+?lzCm!QTFQfLUt$<8A2BTkvNM?Zi-yEajtQ?ZALr_0SyL$(THowzV^&(t8PY! z6WZ5~BL9iF?}Ez#M3lk{KC2~7NKgS_ggEXmS5vle}6su}AyP8r2s)bX=y*lp1>k!_}I$D~2#za~^rN z?D?)F`weGZTSJwlN}7?w^7!vcp?R;B`!*VgjU4zhYv$5JToKGUHF>fmX#!-bZS;{n+ zF@6GMlfi7+Fu}c|sv-$y?iHs)F(HIEE27tlvj0t#Cd)Jk6Ax4N{d_yWQ%Ua%a&ngu z-ZD@xZS_gKt4MoG`(!OXI^3?Jt2|Ye8*uFj#^zQEY8D&*C@yPIhUh&!e52N5EqEsG zzm9cqx3o_2(UJ@S7!lRwn$ub9&p3*B-8@jl3WD6@eB72Xkh47Y?afn)dI^znM+Q&} zP| z0Wb|hx+oDgZvOAlZfOtJT;V=1aV35ARjeo|ljv^QiGS&n)>CC#w$!Qeai*N1p%4i2 z=iZ6>u1k{)UjQBc1sCRf!g!)K(d2CuF*|a?dZO?$)GE9{Xzj{QB$&N z^)9zROCZ1DJLFS@SOQrIA0!I3FM5jm~Du1dwTin zIQmRnjRbQwrt{_x<=}wZXNAH2b1Oxlf;686fuxpiVg?(0eX_3o}_IRFy4T)X-n$0G3xn9k_-o{J9gh!}A~Ei{W?1|2WS_+%IzJ9Bey3LS1F)!v zS@Ql1^KN(HVt?eRMHqZC54rgpRhfwdqlDPC_Q7e(oA>(PhTa+jgOL)C_x@>sYuY?c zl0T1BDr%d+6`!zVS58;up3zuF=*T5+C@_tSNo-?KJPROy%+BXGTl(`^6 z-Q)z&Z$?6E-~#EjAIj`M1*zm{>J8%o8sPFD zA$HqDi6nmY-SKZ;%h#Z$)XxIyc|2IvyNHeT6TOV=N(K#H*O`sk>}<64gi`{$e~;9=p~dW3xu@x zq>X;_hLw*;F{FR7r29w-cP$017d7u1s-CR$ar$Zi6VrRi21lH*v5a!o5c6btLqkEe z0w0VbOWJ20v-3#lxtjM16#zo?!Y?SgiIs|aDN;mSbbJ48fe4GvDGA0P^cFAAPwAMl zAGs;7OiV{WP{MK6BamdK+i#7U?DbO%KRHxtJ6O1I49uPDLGwhPqkSgJ|DaZKqJocn z!hg^voWhgdu<{^lUE*2U26?f}o42{UJ$tV_--|aUeDTIBXvDV;1YWo{-Kvm3jwfwk zD14Px>31#2%?($1NEr2P*7k^A!(;H2Hw}i7vEl0>#_>t(5ft~QP1?tTlAIhkMxz>= zYCrjyJduq5!g?FG_4gU~b=gC%;!_G;L!L!aXq4z-b?RcQixry(`76s)$ElZ_wC5QS z-IKKYBf&xcRz00v^cEFg98UP_>(9A&O}QKXQBnNaTs;vh&Jt?8BX%%%o07Mn)O!2U zLxbbUH!{aeNb~#5X#mi8i2HMC(>&TZ77*t4kRO2bYK}(_Lvv4AecAX8k2D6n1A5aG zt1R&n zq_OvR$qq?u{Z_p8w7uc6HHL4)W6@Ya6Z|svjG%B)tXUTZ^kl z_=tI<4A!KZIv^HzAYEd^o`od;3Kjc8vpYeEkMH5S(8SonoG|K|4KGXhT9otOTnq%0 zg-`0hSy`>SLd~>-5y?mmqiCnFw!aMPp{fS>)V+EyUXW5k{-5@L|7msemxv*>S|wt~ zJZ9z*{!Tb}FmH^Z<#|IB@^|xp)yVF4zKp-EXMKwr*+0Lt+&)xQFT&)Xt+1Y^u?IRL 
zmAo_iG&r`fy#C(i_VYruW*{kvTC^09SI`e?Sw|%W`sN1=Et~yx^|o}44oN@F@i))g zx-O20$$}dRT5Smy@+y4SO)q0Dt(=txhT8qv)|uEIH^+-z5yRxg5@REhf|1rT@$Ojz zU0p#>%hU(2SoUQCHY`N{a>IlAF#6Zh<-*xf2Py?O-T(=Tm&3m%%-0=HJ4lZ%wiZ@> z0L>P*(PXIZ(Pjf1+@zuZ6NG+nxjW3q7S4MoMO@-6S zVG&q!Hltj2bXD7IYTWlBYh0(D?;T=>S&Mvr<7Ig)&~BA#3c};AXO=c%#^(K3yrl4~ zGL0$QU8VL5<_%#kDY2-xdPyB2YnL-_#~YQFJ@h7ih5->zD&}y%?D$Iu)nj{Mi&9T? zMLgInq8#1&#}6|mAIct3XrK?Y#sQy!oXJMx?$!}(9x0gZ<>NUbClMnVgzy!Rr#*q> z9)Afam>Vksw-Kcd_H~M`x~e+1;!nkxC7_A+Q@;6V@U>tURWhb6$b%6U841RkQWXHd zow$q!)vpHuV3eub>akHC)uF_|Z#Ib?{_hjE8fc`@$}f|}i5-6BXog}t@eYOO?xfXD z+}3pP>zt)m4KqZFkRW=2gKq+uPkPCBL8>qDAu{lzfTNMd>b(wSgffZPEQLOwmnSyM&Uzl>(b|?aPI@7h zf+r&{`}_g88UVop(+Ntl87K)#4vDR#j=-g&L-)7rzCXac|7D57;xLH|m{qi~m{Pw2 z^&zR+Sc~|4aaaHxmzSKyYWP>nua zj{(-l{Wl3To;HM_%JhSN&9sG4=xdT_ZlC@OV4yN4hmHb=F&RaFPrcJE40`3-Q%)4L zi=>_2(T+y1GVe(karq1Cr^49|uow7b85`+{HMj>z{uH)!urVB((R+d(HS-DU{G13C22|b4*X*#$H>37~<7BjB+4|wL+P3O)@Ot zz{t9JaydaaUx!m$gV&ntv3+5|CQsNvmXhEp`^Bs&luEGe?fWH)vBGcBRTEeO7!!UU zY3Pw12C+}m(rb-@!dQsK7DGgzSaMgzx1C2-9(J58H9S{(DxkQgfHrm{ProZFL`{Dn zQiNs_FUhP#{-)@yw>%s7#+Dtdh2I#s(urf-8^{&tSmnvq$Sp+Cp^Q%uzRo8@W@(>v z>pBk`lgnQU(6RSNH!v##+e0@Wv(;dPO_m2yrnMlO&&PJN6$f&G6AIb41G0s9=j8!r z%n{m@{zy0casJt0lCp|p&=9{ZFL(M6WLEzNT-wv93|0(Qwg2f0ZQT>uv#}wbsYB*H zKI)sx=s^_bn^HHU+|aN~4Yb$c+xiK229InuiuZ~M{nz&$t$RyKY>CIJYwH-rrjEGT zej9i*l&LE+sFk#0Uo(4S#{!Nx;6fA!0wE20F438Y1mKbF;mY|gy*c|u|Er;us2$_G zYUI@%R0Z5IlNxf9%HyD8^duC4g*M27>)dK@V_UTnz6g;{Yqr_dKZU4EMZ~at0ekIW zQ(Mgnq^bb@EQ8btZ50)yMvEB8qCjabda%()ODzeV6U;+?hHZ%`Z{xwYv~CfJ(L;a) z@`k)Hdn@LPW@i-OOs(ePh_4!>uJ|BL|AN|h8hRP9ERmE&V&mme%<+2oucrcIX8U{+ zR$yqaTWqjOvSQ4}=3aWYHvW5*nZ0XPK(S0SESY(+xwGKh-*>v^M@r6r^v%@%_eiXO zvnD7-Yl<$mK|2z3%0#CwRd*7nHre(@Mi_l~|8du#igMwvth_TFUeL*+ahR!xwjNC|L9xf7177)Mrc znS_L2U>!Cvr`3eB6U7(hm7!ePhyb`;ZkL1l>4Q@spuJ zmE%*uqql#FRi}DH&kW7o(utRAUC=52mWNtVeBg<)RM1*kQ%!{+N!7O?r8PRVKrxuu zNCz6+lse|P4{4OQ6)R7Db|L>Ea4`$Bl4pUL@Ndf656_*}LhP9k>tcfb0^Awf3BgvE z-5KepY(E4Bgp%UyrdW$@*16HL-uSASq!fgnvB7Cr&n1p1iy 
zm0UI0%d3Agod8>d$!4nHH_)6Z`(o)WSr6Yh84cJvGQfI>bc#V5fE8d>l4X&ERzLlQ z0MtVd^N@?7^Z7B20=F^Gnyj8~WxFb5l!PuB7WL8Wviry3m4vdRY4K||{9#|eT;g=|{J$C|UW zZ_#ExGnPK+MkaUItGm0l4wYiJhue+kWhe9{1nlX5q;LCivZ^Y@2TN@2#k=ugd;R9wyXFxO9SSNMtVnKZYmq^B&98+wrbA ztLsbKu`CA;Tg=wIIT>N@PCGZqM=w%@TM_{#rl@=D*_ZX;k(k=9G`kNd`{wXlxl+oNAI!MTK20d zzO27jh+KOVzE5uOO_7^tuV*39VBksGRGpNTQ|sOOAkC?p6XXF1ly5GTtPj?_nvRea zJe}a}9@kU&SD+Tb2|PIit%9c`(S8WM!OY4E4K|gn$KOt0msCfEQ&)FuM}Yf%v-|}w zJh3ud<@&M6pqMYGf+H2_x!;wjpQz}B=tT>O+XN}8!8u_YOAX<(V$A|Ir<;NTPa|2v zU{YYn8-(o%c7sL<2EO@*NB>f5SBsW4nykr+#rHaKo2OKlsXUgEkoRJP+TCs4^edzEn_4MDO~v=Y)}X{~Fw*A*Y{(N36@pB{ z24?G}2Xg<${wKYc==GC|xw&3CCw*~C!qF+jucFk+SyKFeOr!NAvtZ8&g6VLj;l{0) zJHtfhI1U<)`+~0Y7wJt@=Z6-b4pMm_J6CPHiynG^cBp^JkSu(t_im5R#Ms6&b9S&E z+%|bhC32IZ7q^pMsk3)m^wbUp_3Os}!h~?H6Z#0ipwQ-@$ZW^-mL=n@VP z6EN7nFpzKP;+frSAOJji>IFumy|-;@|N8p&I@d1+!HCndIFo1Bw)Gq>aS72u_}VoY z$g?)bRQt=}eJjbh*mc2T@VW8J0R$;!8jM7EgE>?Zw7HV7-E-XiM&2+`G+OlCv|2%l z+}?jxbXeBbNxh5Dna*=NdTU0s1NDY=<+bcLG!5~%@08}|8S@fnts~S1wHsiA#y!+< z>_CjOA|4Z5B-9}%$(|Nw|)5Dfz*xCD2K?+m)4}o!R`O^U7Gu|)G6HLRu zE2-67)Lj`ekjA~rTBTCZL$wa_{i7{$vjSj=0WS4>gaSBl6>9$ko_++k?4{P_ETW>rF zNOAY5z>_iTt7U^VZrH7c z*BqW*`|6A|YI6~4lmqde=8vA`O^GX!EO@2JdaP(izh8bNQwe-2s+l0Ry|~%Ma@fap z^U>@FiaDg(6$PAR#;#M5sp|>7d2AU*1Us7z7f07f5hQW543`0#h{fH~xCCbkukg_% z62HF3NmxLd$6#xOLpi~63+ftGLmE$Fw!IA!VlqmY*OULQ0>P}EwnQ>xK;pLC0BiTz zAu7Ei0jD1HGpyn(&^YBQt~iA*=Wx(VHO`~NY0AT0aGi$TeJ1XD+XKIyzmt#F0H!yJ z@s4ZHeGC*Wk!X(&V21_F7Zkr*Od#=4Ir9;r6}EYJ4%*hFKC+-|%?s@)CCeNU8}atS z&P=dk>^RHd+}#?9W`8A(IrG$b@}w0QNR*Gi3w;)-$~SPAWySKx(}%mYKX(h`UV*bu z0F*hpOe)z5^$-_L-qZ55o4s2DJV#H#cD1j$wm z3|$&X)!v=hy)tsSp*o}J?_ka@sJN;-{t(ixX055FLhd=oni>ko4&0^Cn9|1SH-_An zy1J!+aJee*F$I7M-}+r?6#?M+r`7KRMnVL#cmi20dpi^L@Ut9xSt>iXxx2(Cht-Zx<9?1;ovjI zjfUEV`x0l0&gpiKf}s-uR)dHOcjMzno$OE4)xKxkN+ndzuM634tOX0O@cE&pPa-Y; zvK?RcI5w(tuMwhycWn#?zcR=>vwM0%8u*gtxJUf=NRDUtF}nOVU9ejw#|HN%peM(i zhvL!OuP;;nuC4tkhE5wB*zjqW&e@6hnW#I2X9 z#i2p#5CYpSLbvQ8Bu5up%<@BaCOh|6l_d@QbVBj@6FZlGFE_9Tj+~d+RTN>1+c9fT 
zdZDs!aWBz<(vyx)ZXQkV1CC3oN(K|=>a7Xks{8=l!KrgaU%I^V|NWzjbF^bBf(zZS zcZc3jMljBMcvp{cqt={l`GLk$J{kpWUWZCURsm?cWqF})CJrq{485x@qj+|VZn-Z` zAeb~J8Jsx=cnwJ7d$P4M=SKC;ZCV$Dud7iccE+feeHbD%)57sdklWKG*@~QQlK!)3 zPc$=l9oXtjK-!VN?3yUghya*Kma=|3x=r+5$%E{=R{^I=7(tVrUgJ(%)wQR$J?@LR zc^IRJmZk$;?bMfO15^^|DN_VzXKQmh6navd7d`9RP~g0>^f$prmbv8W+AKw*620|? z;*S9%)80M3NFagr|;J{nb!qM&}{JuA5YXqg7RCTQOCL zqDN*_V&Wvmb)z%>QekCL*SUy=XiJ|*s!6bMkc(h1wgJP?^Rf|dLyyXv_Q5;8>O|r1 z$rQ*Jf4?*N>-S8*-&1~bHrK`|uH62l#JHAz+NR;=$fklc>G}ee1|CS=S`o-**~V38 zd_{YfBFH{99&+hIocW@Y$AtoCc+1&RM7YCx&9{^38wf`bdLNWO%W*`a&Ra>VnM2Ut zmIo>M0k9(rFQkTd*h$!F)S7$#o2`cj0Ag3|H$+Inmc`o$4E92 zsOqE5&3%@P(&oD>L$McUkK7Ai<1pzw;ab@~$IsE+jk^T9{<_eKWKH#OC$2>6QeUd1 zFWlgXbM^a+xwvBO0QhZ~pin-Oa{DXut%ngJw95S5^8#e?APK#YYSIQ43%-$Ev%-f(jB%%^g*H{itw@R~WF@s=x=e44IE zIG=w_mwFuc*AY^9)#xCuL5GoqFrJhT#IiWK{&>>Q zTb~K2^eu~;Ln@!7+T(#$O;KsL!P8P2KbE5e4Q`EHUXwM(3OZsi&N+c`rgjT|U~8A} z&?p>QRf0^ESGL{AEuUMYH$Jq&iN_GlJkx?Ff+!`vnR`>*-eUmc1a1>#X~+ngX>4f6 zuDA77btZ}YPC~H%(ywSSGdF#1=yJ7T^k=bNS~{2T zN(XaOY1IjJftQuSsw@8@unIEEv7n6@o{<`thMnl!N@Uo4Xr?;H9=AW=vx!k*_Qpa#?5kN3^Tl{Go3 zB$u2{Y=7@{(z@hH0-yFX4r|`BM$V`Te{d*{C07f1{rr~NcF?%I102R)26BG$++0dk{3CsE!s3`Y25D`2q=DkIm8)J(aw3$O2yV$5x;2v^7ukWfP3=)W@9tZ zum?#9wakN0&ZD*%9z*O{cN@AKf|NH%I1===17G%Rxf2#}7JuX{Cr`x&47=;-sfsK16k&%0a&D;0NHVejcHu$YGUb{_0 zkd`kmPB2=|4iY%=YStK`rNQv-utIp6hKGeKa?7dX%~K3eVe+o>-GQ;xK;G_-uce0c zl!~h5Ta1jRd%_I&)inJ0vuRv1a8zT!H&t3mDVyfp{-&C8mHr1md3K(AM<(Aueo;Cl zpN=f&5(}}NR&JxnO-%0lZGpdIUYpb+<^-ue3Z4Hne z4ksAv92A>)4Z8|%eDX2szDmaGFg`cw4n4{w?8qMMagE{K4GUXdYWfuPFZ6)5d+sKN z@m}P!pcB?_o4K6Gv|4z++9j3OFAN_?Uv{qh> z$$TMV6E4`{a%Y<=c{N=0B=%R+v4e=8(eoZg8RW07ORaJ*_s0yyfp1ZZ795(!Bu_Ir z_h^>AhU$9iVag(Mu@+r2m8Z$M9qtKMi1jf}rKZD*ODssuJWK6PoBxr93ia$+a|UP* zc^?81gNJW)eVI*1X*_H{w#b|J1+l!`q((aQa?d4Ale3$i|05g~P;qi719PAe7zxP$ zzv?57A`fBo;?zXXCn;K^jf~{z2g?UJQ2a-dm?Y+%ohpuSoT6&U zuK|t^Al{4+7&@uys{C-I#s>di^z%N&1DRo-rIOM;J()fELe(3zAnzNFwwW%!ce-AG zi~DL=+=SXiqo5L`{(FACA3mV7b#da^1A% 
zG88Rg*hW5Lf|6K?_?j$)MC+3VkKArQGDB#JG^fnUUPE6D8@wm||B1wJ_~H?w8i+S} zVgNewiAQy>zCaU2(EHx+N?@Gnb!c~OTa=eip53IPOwwjGnRqc#vDYLDi)b=mpw_3xeWp(QQj}K3_4xK>$ zZz1o0{M$*!P~|Py_UjDfS4k4#jWXU+JI!BP4%4Vnt6M2<-+k>!*`dMa*$>s~u!kLG z;Uxjj^E7|Wp74nD>NCGS#lP%O`CTb8ap9>SPTsM`Pq1(M+Jw^UT2Xpr4T8pL#5lUPY6Fe0IxUg&5g? z-N2GsDR%B8G+TqNRAUqOlk!1I85eO0?5AyM7rs;(=%7L97w#)?lMiK%no4`Ep`MHO zK(x_CG3crFYdYmxsv4$&1IyvJ^J9Jhp**a$nNFy8RsClU_G~qEPVq}^J?e&8?@SQdOOAX1Kxib_T#!uT_pO{LgwAz z1gJt0MYH|T5&KAXc%&iS>ruI9n*6_TYA&2iUn3jmQetOu>8-m`A|IX0Y;+N?Rmd0@ zKMG=d98A{(m3Qr)5Lzp|6^$Rs(=^5A=fIBNAm^m0YKoZ1+%RiNO07&qEgCs=cwj#_ zRPxwpF0_cZGkgE%FLypFtxrnrtDpm}^^TJ_0U*8W_DPOvm} zPCkLxgC*xvL5Qnxcg#Lj%AjIr$EaFL&-fub^vH)wvrM=_B((Uq8}%~K;yQoZfX z)OV%o2JH#2HgJ_BY6q;4g|4F@0u{Yj>sF8za&as?p#ForEf#IJlAqK!v@!*!-OGE{ zi#r6hxI+`f|0SH`&E`x0l9_y2f+o#Bn@%JI7X-yHS9a5>@R7U^EuHbW{R}QGab`7H zo)9R3av85ZN7^8us*@f5=KjqCo4qj3BCh<}hYg;P9b_2>ZeU+lmFgEuiR)fIn&G9s zRzY&K=XKm^LXSbfhYa_rpm+>&RYOn;F7fLi<2u#qV!G;=IQ3E1{^`sXz$)9nN(^!N z$a>KI9K)`*_qv57Xh=_iPX~YpcOh>D4R?&lLbQ2XPK77tnGZPi<%j=L=GNw+(E9Bs zl4Ae;$4MWRpV(W#Q{P|+j*th4&Aq9+w_B92*UbLldT6MH1*}GGB33>8B4|+Zw&;R7 zQ$l&OYnyA^WZ>y|jM;KqsvW-FQ&~Rodqif zx#u=<2y?;XGR>(?Riy~&v1J94OOqh9y;=K;%pLN zkeK!Qu(2iDXLEb=qU0h66-x?0R5{?^q6jqrM}$=K{8ct`4EAj$s)kPsiAJ&u%d&25 zpE8dIeBsJv#nWHDD}BabO(ZFki`JYoM-AQYvw`}b3?c(AsTWqg z9w&e^@Z5A=T-C&09Janabf~{{u`$t4Sn8{4-&dK_m&5|xhD8xTQ00EL@gM78s{%c7 z#7M4b_< z17GxODGHsK7GyN`W+=&mCUe|+8x|tjZbH*$thq~fB>YBK?2v8Z={`WooTH^^k)wd~ zh_d>>A!Xf-(A-AT1VREniqK&bW?qoh^<%b7fU8r6>Ph8%q5_ z`{cXY0D0EQ%OUzt3wC|_)a~HIz#Nl@=aV<{%3P*ucJOTx8W~q2iNnanXI}K}fN21E zKqJIUmvek^RRXVLb)!+sIdIk9;9{pDHM=!zzQWssEmyu9YaO%im2PX}1?Kws6-tvN zM%IYjI^0r5X!}i=S`*d=+<>J9}>iHWu%% zEW48yNSMh6GS(=^sn|5PL4IXHG8xc(0wSN;ojz}1<}Z!fM)c*o zSm;TKt>2YEo7RsxvV0d|2IIzG&);sbD)QNg3OoE~Gv^=FOM#^Ll4A7zm$IrCM|P9L ze!I6**z*3*78c2xRc1oR2lv>j&xNYVF!y8KtS44XXg*(csiPd>^8JL9**h4nBW2HF z53J3KUn%JQ>so*QN|VG8-`u5qwIrUp+g!cjm&+RgA1IBfW~<+Q=oFY_@=r|CdrgjC z_BwWOo|F>nB1p4UFWaY{csci+hulqF0oV;W)-F?oH?x<@cjH`=dh<3QN+xv=`39t( 
zWvVARk&BDW#||7a7%M&QjauaLXPKQCEHN>`4|*@)Kx^S^1aZO~HkL|pTqC=SiV$~x ztf;s8$WDFsKZd-n_&gX^b{N?&P}RNfPy9P}&g7enzz=M}7~db#wc?c(N4}k-1YPtk z6JMjbS2u#DM_1Q?p7X-$($Cs=Z*H}Tz}d=QgB>@dC9@Zy0j~E8heuPj#16fRT~}71 zL2g0KOQFO~f|ywD--+j&0QU^>HpM%5K z_~s$b`_!KMx@xH>w-;t3fj-=>mTmPZD(t5Yzjxa34);RMEuHQ#DR~f@)%&5D=vTbu zRRhBlkk0DsV{rR5@n2cIyhL$0A$&0Wx6r7+zgQoSj3&GX7w^ZDyclXBl2(vl6D0jV zOucDXlIi;fY~xH$X);ZjS}HSbR;J{ZJ51BBoHAv~+!r!aQ*+CG0V-2VO{N^PT%aubIF;U4C#RU}?5J6;l=DqtL@9`e*m%doZ@$lUDbzSFmE~-gqx4ALgq`C?vpk|pOgze*1oSQgWoYc@J4X3wOI0#o`O2?I}eMo{OJ}moxKD3a$UQD z@)cUOiH$EBF4)~~ifgVUIAU}*jT0e%S`A994@4z2f5gb?$=H;n8#K)(Jy@oR^qEPy zi5LEQ5WIV#(;v0I1iDWd-1FOeMRVy z@1>Ba;F-v*q!X&Av@!#v6!jLNUxCx-`j|I8N4~aVgLk0Hrt&4F3C#zygKHniR%nmo ztUAVsTDwx%@Lv%v4Y?w+$onYX&!6Vh?tdTuMgMI*P2^=Q4O+){n^gVWunI*=oc>x# zqb9C%nA(kku`JsL`F8NtCF4ILk;lLv(BbRQkK*hKacsML7|37R;nD)_=doneK(TJ2 z7EJuFuY`{wJ~}-cK~xe8RzHH?&*hd3ojTMR2!}a>=9LYotxp)4^Yf|Z8j;V%wN^n?A_F{F78JDk>yqlT5iff zV&oE%RvcrAd+-|5rE;=1cb7d%);J=SN@D+0*iWqz!+9R3_y>Oh`Rb$(NFUL z>89UUQ(3tYs+DT9&Fd_X7_bbg6@9t`M!MuW=2DfasJtvqzy)MvvCAmB58S2~Nk2l} z@E?Wu%VaM@aa3ZJQSfKN^p>S3zB(-FwySGi27xef~M zAmn29TI-j{OvEmf;0Z18;|q*s{8xOd_NEquN=t@5v8rn;%NBTU;##=wvO0zj9K`8C zg3_t*xZ^+=6hM#=n1FGN{)$ujz^=;dzW>%-CO!5UYL@3fpT9S4@>_&cRW#msVZq=3 zDU`@_Do#KM|szz2IM4vfoYrxMtNabQ=CI=@Cshu0{zA=HC5i2E9=(#}_Qozv|{j@G%OU^;S?7)+i7i>X4WaHEC^g|B7Pc1jK}-CHlo1z&Mq(%^Z*2>Lb#RwhI6 zPJcvRGy8=Vv=<{1G93Ze7mp8B>Ql{`Pe}T^^|!3hXW;O5QdLoMFR8YpV|7L=q}FHh z>av>{-_p1>KUkE+flkGBQo<<6j{0x(!f@jc8V9q?Wwd`dHkQoIg@QTr%u%!6$YVWg zzEz&xYT8~y+H5|`Ib=PWx8N0qH^Dz|Q1w9TcCFp`@t738D3~Bmke}oD$6N!<4d%7b(!&bd!BAa5&d zoXlL*etE4Vx9cMu9Y*FdSpAK5_Im@-AirW291vJ>Qk}jQqUPU48q0f8-r!SfD^1tcUoMbx79>%~h3k(Z$?M>(gK7=(q1JPM~qrswj!O#wz z%B(s7E1*&j7ibltE{&Xhax>o2;t)PfA(6RmH6@RAot}6F8*qdOEW_X$xpU z)OpA%e&h#te4D<}VbJVN4UF8CWS$(a-toWEAl(Bc8mbW+rxt_PSw*>O{?@;dcQA3LR2Jt(^8N)~#|Qz@Bcg^YAeMjW`;%)O8E1Zqp+IxO=k zy7=;EaL$E7%x0bs24C(IUAwwi2>T#rqezo+5QZSW`2Xghmfk_=QD}gx0(loCIF`!w zqOB7u#7#VjhpPs5- zI79{7-OZ(-;Kb}1Oa45z64wciJ&G*$depfr+I0qyK+1ukA+yAM5!Gw8 
zcJ#pEBWvKKV|XZz_Oal=q}M3;>_-lW=n6Ylvnw)rxTKNg6}8M|3`@+P zx;7>F**p@)uPL7~pccMbd6js7RR1MC#liC+|#U_f9aEPd@q1>YPtV zmxT`=NL~Ea_M1*1YM!Z>WeZ+Uj7({nBHQKYNMdi?Sy>+rNDuJ^tp)%0K{I?G3i5$7 z>((eAW!FACpK94mQ@bV`IP%>FCUmMe$w3{afs8Ey3Lc2&bw)?_k_x1N2%a2RaiWSxEsJzJGcmRZt5`at&hIC<#2MVqA7y)l?t< zOvC#Oxz=cM$R?hch}GlJeh~+s<$0ydaxy5tGfc*<VIXNQ>GHdr;WKRGc!HXaC?mgnP(AQn#KUsQJ|0n)bBpmc}bLz*!-d}QwH zg}pK4tyRywuCk{u)k`rLF~BeUBR*(w3HVlN*vBlLUQSx6i#d7Kx%F!H$5A;$T@zJm z-S6N@b7?&P%{%nkG8Uv#oa~`hF0A>`BEGkqO)fH!Q&|Jrh?a#U7pWc~XcpPB5t+h} zJv$s*9$_Epm1dBt<0XdRTYyQns>Uy2(QhgFlZ^aJxj&puA3>}?I6HL{*L5XPb<@VM z3o2CmGD5g#s`(Z<{)#mwFX2emg~ATw+XXIPNh**UY)^S%fqQh@UK|n0sOY_ksx?)m z4PROR*fm{ey&Xz-EvUNtt`oD53yI>$m5N8eNBqoErE zogHI&CAJTFbNhy`27h%eb5_GTi_x2RTSTLC*npo{qLiE*P9euc^b-3FaWfJu>A7;f zJ|tXr7JQN*Sp3U)-DO4fgTC6G77L?W4r)^`^Ih*&8XR>qonaO> z(HlNR9(TGyIovyC_syA-GZp!NS-E~KPf#_4W##gui-S$5;-Fv0{%4H{3QhX9Z%#cg zYv`b=;c#TZ+Sy~QoJ{8a93Kr}-_q zhS_*zpUxJYF`YO{{W3i#Y)8}Sjgz7mqp~gB&B%c@J+7c2drM1`#XsRIPIf!eiqg=( z^6K$(37<#2Fly;-fp~}2wXCh|8?ibO98G?zpRmkUY6eo~(rdqg)M+Pumh3aH9wR$u zwr-_aqs-WLchhYVZ4K%NvQWabJp|x|*c=$!uz+I?d@@jzL8R1G*=y8*K=K`r1NDP) zgD0OLAhPAJRm}F$W?9LQ_J;+`L;vB_1KTi#KG+&H^5Bc{i500(T8(-s#>0}^E*6_R z$@z16@Jwq*pErS#feBw&H|iRth74O-bJp~;UMr!@9yc#D7iUwv=1*G26wS0BCIyp6 zEr9@aFjgao>P}nR=%qLsiC$WBcRWOgX;W%7siI3vaRZUPVCbx zU&i<2`r?4qza=(ed5EUU$Lpr;@-tWQSO2X>>xcGkuD*}=VR8QTAx$patb7zbKDv(E z3DgA^HSXpBQr!tI>^XJ$8;G{-akUxtyZlwu4m)U9vz^O7aNWOqdMcM3z@o$zo)o=^ zJNJKqJh)n!D{qodOPjiT8deBOBD{N4CbDLJ&^aVVmCn~*>N#vXEC3D9W8j%c=X&R+ z$^w!p|4bS!VO^*APxf8u_xL=q_{h1o)}=W=RE{pCpD=o!S?W$$oo7<@7?P#9%Qqpx z!lnu~fyCLo{Iv~_`oITNQQoVhF>m{Aon#$38r{7aRUxpgsghdgpjW9>78;C`&TZ_X zG&0@NV^|Ia^iIw0B{CmT>Q3m(RzWTh7*f|4mM?mr>EkRAI0QP1FjQ-Rk7g3?VOZ#DR8+qGgvcz%%Rz8 zU$k<12j~w0bbOLvZ`Np6Y8SOqHq`lnpK9b(IqdCpHn$F}IhTR0>Fas?RuB`ee&%Fx zplJM93#b4dUb{x?W>@LwH?XH$^THm;&gnmXu=|&ceI&8CjxGtX0ii5N()l&zPepxA zC=+CCc3JSjhhad&B4Bd+Vn1ouQ^)|GWc}nF1ReX|AbAg6|5YR7$%wpaxj?7smXeaL zup^Q;=JQnU0(;7L87hRmnmUx)LP_X%1?;?{+CyevDjwPEjaQdX`_)@MS&C6lxju!Z 
z)wN@T)$p4s0G;Rqc>RA0-;D9MrTUYYk+c@*Bro6wHzn26C?*4NKc0x5EIQ#_q z!iJ_~eR4~Cf^AyVd)L*;;a`k&VpS$1GBelE-YBG=m~779wael0vG@>Ahwbmfeyy@L zd!G^WUP2i<&<%Z}tdJ9J)cXIDr|I)nQ_X#*^=9ds^89-(wc-SDgp5;8)OP}ne$|cA zqdnfni>_aM8{64$pS(J~yVAUt7xDPmCH&T;Kjb;rs8srGYb(txQdSvc9=uep9@Y!?PKod($Enrri#y2P zLANdpjg*+$=@^*^m_G`-=w#2mPa$RLUx}82^U;(lnP`$K#(g>j)P>Whlm5OhA)EgXcXAV=l!G@ClN~TnUqpd%h zt~fZUwZn5-AH3h&Xinc3$5&~?k!Y~bs=>OTpKb9#0o_-Oa@qTB0eNiZo~Ib z5GG=OFg8W=_j0ASEt1?219@sR)>vbb@S3ins7ZNTpp|8L?cs8JlJms^zXH2QvrnOm z&gKP0<}J4BDKWHT_*HYy^Y&}t?aeuU_?V?R`}cPaX3$FKtoRGTq;t0ZGqP3lHt_BQ z+o=f5hG~in7vTL*>+gahnK5(@l!6C=9?+t(dUg*`%p;%Z@+2r^9RAC%gD{8C}B#{uyZYN8#2ZYw^bf&~fyiTbey-%ZWcGfYU6=${RdH5dr!o0Gs z+s9wGp`n0#MNmS=>d3@INVRZTz^aQJ$x^K@vU`-ff}cpQ6bGEE9WV&%C24IKjqQ8F zY1CCL;3(Rl$(Zxmm*1wd&26t+aG9E&WNobN`Zs|}Wjo^40=EOd{f+~RuwD`SpzU+{ z(V@~~y{YNT?-41b&CYp&&zGG)OdVyEE(IpS!sMu?O|#BlpC+A&R@arH zXNVf%vN>M0>0SP-1mynkhd(~7GB?3=JlK=42e`mE*j-P{mX6)D z!CDdo>1{mkbm#y-H0Al9T3Fgec#tBLbV`9kwUh*qwe?oI!<6L}7Sx^x^#kBb?7dH97l11{*d4W^%&zef(5%Uz zPB!2K%ftxP*V6!3yXQW3#DuPRb@WtCL1a98hbO!fI$GJKcqja?X~OVmQupki)&*nu zBEp~NtRvR7B03$v$tXO9TGKZ&61WtY-6j;roljQ3r*Vdk}$HdJYrJCrbd7+*w$ z&1m3uOpX~ix#zz90#0KMAl?JI)SSBVe+oD2FVlt_UDqFY*r$$I|M|Po$4_u3fkUtH z7hlzTUk;a^b!%F!#&=RM>-H$dfjfV6S%<^wD}EJV%wmjBsTa=Cp-clM0IXn$VkL&@ z0###h($W(d`w$`o6L_4;PVf^`@!SFB^pCS@*m7O((srNnXa>c3`Cj742*P+><$rGr z-r$&{ObU$zkYv>UF|&6E?+02!i;+YeDOa0XulsggU;Wa%ArnS?iC25XFI!p5zrg;8 zSb;s0ME1FSX~aAzhYpH~tG0c9qzhly^NWD5Sbn&zh)l*cVHKvEu}6JO_B>`!jNRV0 zypTnr=rrl%%<%VIbkr|Cn`Q9$aH^k|?QY(ip0oP8Ne<-}CaXn_gXa#fZ!TQj^9hQdB+-*+;BENNOcJK&2GTg;v-#`fN5!zc9$K$fFJUG zD}g|2>}?D@v(+j3wNq?OOn;E5ahWVri;fh?V)oXYADCnD88K_lOuj+e1R9LR7b=%W z3smtWN~o&W`1ovJ`d>W#;zb33*Z^zoLIcjS(yzF6E%;10c>W1rGcI`8aeZbu_U}oG zw;@9GC^i0owmE00;rB}>Z=5*fgs$uP3_9M%rT9Ijn|PS`YcV?HOk#)*vpGBD5bb2H zm(XP0OLKYYCP)4^k|f9DzD@HqMi6(8mq)*spLV$Cj;+T*M`+VuCZnZF+IYp~E+xRn zjDY9-rk%zph>=V)7y4^&RCimsdN1i*wI&@X_$OZGU??k{W$k2#^Ew&ii=$foG2DuD zxko}gN$nn6vp??5gYoI8e$f!!sgCrJBWkRDVmdF=%B#tXyLC#GeKkTvE3wd$!t&qJ 
zoyCsrEZ)ObitBQ^c5Ewi!##@s2hKNhZbL~|;pUhmYm<^M!}3c%HWG{cUWq+g!OEe< z;{^*h-CnonKZR>QhVLL)Pb1|#Y}2kw{Eqj2?1d(m=RrpEBPr`*L~v%zw6=w!ZWd~< z-1aE1pXzs)Z0)bU>sFid=b=N#d-Qw+)&@tpP&d@S*3_*Wd@{ck(-;D!{VVo)%8A>X zb8ITY%^9&X2TaNSB|EFm1t?YuJ03kyZFlP zd9l;Xjva|E1Y4c$Rs6afg@#72WALSXegHuX{T-*-@t2~eR+OnkDR#<4gX7{J;_@!wLYVjc(6FnS7+C*$3wY;RTq%z zYN_pJDed!6)nluxW1$V%aUSC=yJ{M|%@(xE55hGH2K{p0)@R)~9DEm~Xjoru_ltjo zcbjV|`?DQUT80|xZ}%~c^Y&O_O_MWfynH(Zfi5GNuqizu%mLCscE zxoIy4t)r*xk-huu7{Zx{<-QhgeV@MP;5sI6Cr%fw8muA_f7xoh_SH0Oxbh z_-K2yq+-OzYGqjVzCavuulvYl`n#l$_;T|kqqqx)cP&13b`-uWgQ_yxeJWk+wDBXM z*=;W$oohDZ@Q9@^Sk$NGfj5U28?nscib+yQxN%m4Xa<|;;^5oQ6^wILwN&as&ikSKVT?VRuRUVZ!35)nJ{Uf_t|Jg_SAGrB>TTi)h z)?Fg8o<8F8i6^%9*Mrr-5h(HA1jK>f;zEw;+sl3hS9DyS9;DW#hTU+HwKbVZ9S)1# zY{Gjt;zHcLEDrGsRN7}Qx!9O<{)x!WN~4kg42qa~x3+8BrI!kpP(UrPBXtsvlb2@M zR)xp9FTdrhr|A+dRY~ostmPAEriS*3#A{NXL7eeqOw+`1#*HMGmuU4gCWJHy;lFbA z0!aD&D;wYS#6wRAKb{a%{I(CD1d+>3^%Sa}OUgnFu)w3dnW{!1%BN$ub)*J*lXm+> zPh7496yCB+t`w1-+#luCR;DNU92YP4}hVg(&a zWY5@7l)iF!D^ZX4IMpATXy&ID|MW`gM6J1bd>ul~tD-R!MTuwxn^Kt4v?pedv*;v1 zu=(8JkYEN66x+{DiR{(3X0tP2YY;o;U250F*+98y$^k?BoGNM;7zgN|@R)fW_~dcg z?eOY`W1x8#Yy;%%sQ=ULy$V^4oyY&e56jRx!(RQ>=jMe{-PVgmm_592p2yz-;~%yA zEws)1U-%(cBU~@+$EO6zrJGbp#lL_#M zmR7t5u-{D7Dx*Oy(Tn##clzDxoq?*Rh4531533@&I7fQNLe?DI62e{!{qvkoSDM*@ z*P&xTtQ{-U?Kc*M6Rj748nL`_nfsv2Q!^Brm0>VwWu+bbM;AEjKiTDR_V*xc+5E&mMx(X%Ue^F;%qyF8rn8?b(7b0Z44@?;p^OQy7^5@p- z(b{d!353jOnEJeNbH$FiLiUTsjz8ABY>XL5upmDbt{R`#8;Lvk@o#)1C1*BFnI`RZ zy5BlgT;*kmavwc28wJrnG@6Q7ah&p>6oN z-Rznj20bhO3~#u_Pt0Kf$w78y#`mBqv6&#;0lId5FciOz%bnaH$gHF=sGoQw|B~2? 
zJ<6&~+Rop&w|X{7o4vJ5M`NHyhxX0;V29B*1)hVeqO7SiDzxS=|J-IelQPw*HU0L3 zO?v|B-qpc945{rI|GL2_y9X$DMn$0vOq=fkLrn^qWv!)@m-j@ z+0-r87_2zt!8N~tX?%=O)}50tndA;e7&b*%9%RlGI89C1vYk)2*KZ%Fc@7Qcuxa!A z+%$j=G)KHcL>)I`I1x)7$Z`80q$we>t?Z zYioY+sVuS#$_cgj-$Ss&WCC0GiFc;Tm>SR1X4=$dh%~j_6FD+eqnaCL(m#IMS7i?^ zu(2MnCg9(s1(GOkC;1@QbspqLy(=IOpWgA;##^!G)#lto7?!p{d2FCBdBWSXN9Dxm zsB(U3entT{JL>}{Q?8`(11$?CKCUF&gSf~~;oL|0v4)k}cE}5Fkk5;s@_p??QkQO> zeLh7}4${pG*!~yiwL%sQ+;cxggSVhloWU*!icZuu%JAv-meUE| zXW1WoxB+#4Q+R#Fp2;uXd+bK0@?Faghqb$2IFDP8{rv9!AtCWPTi_&&MsNL3p@Eu( zx3zrQv=HH<22jBlDV1*CYVO}?(rvEAeruz}8-|Pv8TOc(O$^_t9`JNFl;j2in}|n$ z^eR!$T38Qmq9V0)WpW%*Cg?quc)w{lg0;@kHlLPkEEfdd$H$W>Xy$%eRgU|Ur$>iE zXwK{NxsN)SA#WTwmz z<0(d6hKBo=R+S_xc>3n*CbJEK*E9_p{GK`lw+zmpLL^QzwrjE(Aw}3$RdLN^%`p-P z+qtd=Ir0i`L=~sEI#8tAF!^~@hV1I6o7T0&AL4>A-$g7dG@&wB<)>SBik|T1ZcpRy zCUhptJW9STj-c+L*iO+Q(Lt!=8!}uoSNud?O}j={hd#4@Y0sI<2Lgnj5$P}yIXU`$ z!SZ{?{Vv8fr90a|k95L8Ps(6gBT7K39ZE!&oFS5;MWlBe2!*7wx{2Q?Kg+8@ACLGE z`o>Z;T}^c_EUY2i4_T&;PLAJH_GHaNzbV#R!=LxY(~I%$aeC}_q+;(0i_qZv=v0AU zO3PK+y!y6u>Y=j1AnF@Ho%}Pdg0xg0OlAnIt8BW%HP5;{ZdG2=10ZoaP~i%cfeLpJ z2ddIdaFm+y11>Bm!C7E;oYXkOSp!*rZ>Q56LpeG8Z;=o-F-BJU6=#~H#b+%E_y})t z0AI-={~U8&xhNzWZr-SnEiY;4Jw>0|%$_9$=_G@%Y&-Db@ja%I7sWRZbsq?BcWJ%q z*ncr-gArSmXtEw?@>OV}=llS9j!gu0L7!Is69_{61P!a8s6u4xJhs^?VMvA6F2cqTIdkykpu5j1q;tCay!Lt|@dvUKQmDz;yOQ01;nd&KYU6LajRjQ8IZ{mMxT( z&XKl=YeTkf81+E+!*M;)om1j$o^%Yt6==TN6 zGK7r`+ny@Wj~tJolf&`b`x`Tkbjm6JDY$JcGwTvIae)_KcOfZt z>y0G)ONClaI&nHKuiQ7^UE#t1w%faT=PNl@GfGgwjeV;XGYvJ_kIu?}PcGWh=c70cH}+K$=1-z!YQ`Vk?=fV* zOV3_+jDxl7E&=b3LO?D9q1VDv9Pr8*Hc@5wffIh&={R4^pL#3JA*MV zOg>mm%iUd#ddu1p*L|M)j*)b3D#X9jjki)BCMvhO#8^R&`o;%#oRYG5$2n^2_oB zd`D$ezVw62bZc2Z(daUn$m_L4OO?}4)H}ZwryOQ{oashQCFa<|UD!MUFB5sONiv;CZG@J_D?(_thS~&ZvH@zPnPxX&wC_8e?|0sZcAP z_ebcDO3%!v6>B32Njgs`DKh_wBm9;T*pg-ivZuYgBLBKAa#)LZ6Q)R>UdNTPACkR2 zCTwTDK6yf*2`$c&Z$0rBoy#hZ4CJ$bJ|^_M*fZ{ChcOG6{0H5y~-zNoY~ zc{<0xAw`*ebJ)M(#b@5PfZ(ex(vm}U_?p=;<5|+LGE+Y7?z)wYZ##9{P?JT;B7A%u2{Tyq>+^MR9EwWjin?;XcHd1gvy|4;q;T-`j)ND 
zrfC`{ON#qf)nUpg$Wel%SpRD~Xwtu|2rf zBo{s+9k-vGEq>oIvMj1$mlBp_;z#_bW)xuF-bcx9cR>Jjy&FaV$g5JGSWonVubW9d zLf=y~&Ds-3>d4FaSIL1#w*^Y z6q!YFppP$l3?>!eO%F_Ini9{Ohv{E5u46TLv(QWkvQKevuSb0AJUU0oQ;NCJ$l z1s0inBUXa9?7!@LER5tlxQeISC($N}+JWbdW}Zq;@er5Omcj`&qoB;ZSYP-CxdKxd zAx0F*r-@Xbn~hMcxmJWtF&#zUjOAi(p|-CIkJMzKjtQ3d*wRrF85s6Jgkdq^Qbwx? zKO9S<*3~S8ao0Q09B=*OH{MAGeH;q8f7i*(D3NS&^6yzz;z4{QMzVryOu8W^6p0B_ z9hF5RJz){1ow7#AhNg<7J(Z;p+SQ4Q8tMDE8X+q*I!DaRmzjzA6nx94G)%3FQSTr!1$A1E%$W`N=E`^8^B6TE-TqAr0TqM% z7f*3aX1)4OdA@rt^A+d&YzQWqaB^BONi7U#Z5!~crwp4Wg_k!EGG(~LD+ES`As2H> z3>Re%$}A>n;>+&{)j%RB5MrL{DEFBj&?goJ#?I;#2&XMN;y8Ek4#Ew9lFcBj|LZM0==O2}JobWO$km`&2F0IdVbk73GZYZaB{1fc!) z0n|G1vQQ-3H}m&$(oFE0agXS(;Hma=ZD=YSc&bhC@~YzI-^0T#NIFCa97Ul8n{u2M z4@FpCP3y;Bi@SF^$r!kTJ2#up16gsTOh6xibi^E9?u9JPn!>H0#TH^ntNhO)0XYFy;8T9MZ0BRCpf7I=m07hhtfbQ-8_QUDAj z`8rMF{cZKcgg%_izIz{PoObXy{GY!CV7gl(UGf)3Xz$<<|2ts zJ(oBrH=KlaTzO~R9|6$5&(s^Nq400H_cP)=b@rlm**Vpa96Bd)Mx2Xm6^X;s3h)f@h8-PU8?2RUTws)bQic1U>t^trh8=KOu zB-39WuU+@E&cb&sWa5vBf-Ru#5Y{3JB^X_&Jnfy{8*TBsEJRj@-Qyu$jE-;VWpt4~ zi%_|nl8BM_KI+5!I|a*EMIL@fP3$MB~gCxGdArKuDvB4=;=lH0vKoz)Sv9=$dOPa1K6h66m0Y zD>@T)KX%--7}@?t1p9>W5PS-cxh?wuG4b1OK&nYM`N012`d)G^o z+QQc=XPXm$`c3uLvD+Wi)*4%i|9xwvti{h%{lK$9Q7kC^81Yfy_tIn`wWI5Q{P%RJ zSiL3hLD`Jf?6aA*$lG;4#(dH}V7l41<*LGDfZ`}q26pLhXhZZJf=q|#snwt6#ARq^ z>9iQH*T$}NKF2mhInB168{rapNz%lz-VxHA?magLxZX7J=m%h&4Lc1ZU4=?h+z(Q# zuqJ?_FvKmcZcqsawj>0XF*UHc=i*H^90cqc=r zkdFTnxk}6#^%wzuD`8{P6u7scSdOxsp8o&H;~pbmO3gWl1Y}qgn!}K|Gj{`^mM@NNkJ&HCSWXSjyV2paa_b3+^CmN*;2M}PBPPbY(a>$1mL@34sG_v6dH zqEPr}d96n}sYWh@i98WPNx|L6)R44Mm4;l()Mi5LD!0El{mB1zX{}jwdV(pWS;7nr ziCQRX{nNItj#g{IvDja~{3qfaG-ohF#Nx^872C~=z1D(Arpv<#tHtl*iK)g%v$mR) zqBlS<3hEdb=b?w6$Id*3)rFGMG{KoTk|72uUmmP0W|;NPCBU8iK3`1^?PzoRM;fyr zjvxv8oAX@n4QwE|v-6_ZF|p^_JvEw%*DuL?K9OEY@QxTfxasteN8My5a!NH~8}}f- z#f*PxMA|ZI28yUh&~c4<3DGj5?njZJBNC$!pL%>IiVLByVr}BcZd^0!PQGmlP_%cO zSORPOanz-n`a14|1CE;OOZZp4dW3s7RZwc-%$>XvL-(|uGZsXbo&cZ=YMUyS=cm_U zO0PoY_h=4YW&tN*Ctm+M=c>Zo;j-~f%UK;QxeKC*Fq?ttWB)0fMr@6{wDQ6G9(b(+ 
z&8??L`vNK#7P?G#Y@|GEZaOKsYjppok8RVpvo<-3nZiy-FZ@SZy0_vWJ^-;r7Qt67 z4=9PlwCViRXN`#{3o_f^+uMiCD{e+Re$ec-)Y0STi@-kM&eK5L@#?{K#GSRVINvF# z(4uyrDG^M)A>v!$ig*@CTL1ZL#6wnvakoA`wF#&t_wdo0zGA%oXTHe1zA}eE5VaCsy8V%a z5#qH`=bR&hK8hElPO>B7t&-ZOMH8plcnx{MC^A1J3SSwe44pMi{qff6NO|%jhZBET z=x#2`i@>MT#;{e3;*SnOtQZ{YM%1Vy2r`u#c3CVXki`E$Hs+Kp+>xX!LrG!rCMWA+ z|Ct|yzZ*L$4dD!MvMkw6h+a7h^ij+#lqyUO%8EO;9y~?dMo#IeN^L)3NCGd=8O zcC~x-hmW7!J7kafb^NO8P>baS$e+clY<#-U*QRYnJ#WOLf7h5<+F}0%7|vKiIbftz-YM3D~>*|O@<4&S$K!BMBm?}TVxM9 zb@Zp=q=w|2Ya`b*lzoWTs^cs40{tIcjds0ZXFqkhy!&n~{j?;uHA|V?YVPTbjK)=F zb^=0*@Wv(ZguK-u@OBkbU#GZwIxfa%RlYifV;uK(>TtlmhroS=4w6}m1!{97etxIu zqQ|b8xX1q~;1YLyG3-Fs(=RZWF~+7$Kc#(c2+_p zYC$nsva!|AQABO2Q8nt;8Rs>W=<(tl_lc6Kn+L6rmMK5k!22)W8DS!uzmEsPxdv8N zoufFm)$H*Lw1HQg{LYVb*}J=I#!TJ1O{G4Rf8mvJ53v*-m-NJ^?(IPMsSeesOz`h+ zQ;4*{uwD3|xKZi$Bm9tOr)KX0NaC4MTY>a8Yl0Ts%^T!X5Bu_HP9l?Vj8jj<(HqRc z@NX}NYME;HTB(FYQr29<7bCPHk_LK)Fb{2ES+a8a@ORcvr7K46)Z|iQ83$EHT&+@+$UEU#LTnGg>db7B2aP>`LDV?+IFwRKE`r388yk;sEV*2?e!sBH(^p z@(2H+?w(wEbmyjUfgtD&F5tQ4pB;n-VekM>-DIQ$Yi=q;rCTEs-&}We`=$|UK8Sp8kpH9k(%Wg(tqCqK?sD&uRJ_Wa+B!9c-QzhE+3M`Vk&h zZYb~0ul{cO7YwYFJpk!^?&0d*fOh3?QJ?I`I=kpp7K3#Q7?i>A$o!9*)6XW!WH~ zP_oIXpLqj(i$@>Mrz9cu@)t|5G3?K|>AfslEn0rToIiH8>inaYNlv<)F8?Vs31A1? z0eu@?YlnKMvVr;sJo$%WNwRm1?%lHG1M*?|uo&p-DzMGQUX(Tu4;whTY0s|suO1`W zArUgE@DqK_bnO~REqd8V&;{e1>5^}L;?CoqMN|5WBu`mtQw!|3gXQN_(|1_g3^rPY zKB^(sjdb?1-&~xMfY&&W27Z4KaOKoSryoatJlG~j>&R(z&FNdgbl*n7HLzO{3OUWT z6>%dW`1^mP!Pi!dEP({p#%gc&ANR($y~9sW42((EmxVK+HLm_RWoUjJE)C#6hB8iH zKt`vvI~UvTa2X{gPTMuKx1EdtvpjtyZF7OXDO^XTB&h6i%O9-@Sbu;rJ0fR=ni+W<+nt1A|>xp*0qC&e%HBz$?1H*jlXzm0x+qBq2 zYPBVM3T!F;t?cTCtsQhdtj>6ujUUYQ6pR;x*DlXd3y&;b^%7FT69h(PA`}vR`(OSQoSuzZYDtSHl=H{U*3(xr^1w5 zL*GP&n_sImPf5I*e}2QO{CW*`#0%CZgVWz`dixacosAXv-;N8ot^hCr9uE}#3hcRQ zSbD;nphhRAXwl9zr*p(xIk8C^$zX9i&r3Pyo&@cQap84I-{*VSJXcDIhAt)f zU6i3~*@1IMN-7O6c`v!x?qelZ=cC+8+W|Hi?bf;_<-^72DH!LLsoEOh8k6ihzP@}j zM4x^5+_@jMgw}GrJWj$K)!D(HCDxTJyd6Kc%^Vr!J$(z6J7@TTa@*^FrE9f6&*Kl! 
zleBQAE5Kf1z1e$hb|%XL*E92j@yCTu#ab&Ij=X91!05&vz2mPl%cbex)O&D^3!y`_ zuBbqGLEQb;VP|9AIGO#KG|HXk;;5RSA?n7+XfvS#`OCbe4XwQg<%$qpPx-Kxbj#v5 z4)<0KQfGK`XI@7-#<>lLg3@NMoU(m_A(}Zf( zBB}MS5f6J+VU=nd@DHmak5@lZVIVf``7f12`XLd78hqz4oU?`$LMd`ZSuk()j?oRw zwusnYeas_`C>e18Td@I9+_w8wt;Z44emmv#n8vt#@}9gH0HBysb9DCc5%)BmPm{u% z!-Nkf@l!`v*A?(eOym4 zq{Dh_li(siO`BIlFQ3Pvopf$wM2JY$o&mZVos7CVbmt0ck%1&F6jp=TcXJ;}Y-KD9?zYP>6t?H>2I($)X}Vd^{MnmXUGZMCINR76C9 zv~^HLKm=rl^S4w{5u&0XGa@2F7)C&LPSpycNK`5)BT*3|G6FJX1z8ahA_R!c5F&&S zMnVSJeIMW7`{DiKCw>x6o^wC?71p$fNQHl~r2K^LSUUiY z?MZ$q75y$`huSw@{Ztw=sd+nIQa&=2Gi5V+dQb^Ps`4Tuu771S5Vs^sqm=~q8q%k# z=K7q?x6Q;$j2c^zLCw+Zr0B;+wBUB9bXJUvxdfMJ&Z{u<8-9)e?O-aAvSY+HH(S)W zZp!4>7XCu`j!(o|^_LF&a4q%AN((+u`)(V0{9@u&onIz?vrYVy=*nPi#uaoQkG}G_~c?OrXcK~$|8olECT+G^Bx-0iT>`jt1dS+dpm-L;2Bv$ zR%9dZvJba7bZ_z(JFv$jE8PAUiamA9ub}rd5(*8wj;k?wb2XSqnTJIeb{iHziC6%C zDeK`u8sO1fiTIJYB{si3UNr#K^;Pv1j*P*y!k5Kqn{0#160Qi)*J5E-8Oc9pO)Y4N zsnyJpy17_`7x4xTgr`&-wca-uy-q0wMdNfGqk4S~zR-GsE02;Wel!c3mK_E%89Yat z5HH}NyVbh|IAcM0STd-&?rXijb4h)buWGgTqcYrb2mDy?IL+)DX!Y_JPVD~ zGn5sNxjG-yK^N1tzJ6_y7FNfZ^D%lWhM*Uuv6W5hdew-kxWhe)GD0(jNOGW@mZinG z->f~PRxB7u-gD(_2MV$FZSaWINJeipuz3|{5*|tw;&r;{F36b(23wP^+ zfP273weyKjPDANfWH4FR~5UP94eH1 z6g3ST>FbfmnrT6#5mn*btY-HZoa-^eX%A|W5(Tb4!7Bz%Bh5X98;|hjUSaSoDR=vR%4!PLE-=Hg<)9L(p{z!D#099;PowFF2MjSS{%{t+?AEgnohmA z;T>SJZcP*SU>&riH|%$_!ZiLg)0Ad_xB!Z(U4ulfj&@f^olT{1QV>KLSL(sm3w}0f z{&~rw;}RvwiT5lg&cD3JzdV!f?Eh~<7-sjpG|060_NT+2O$@!Nu4e;Iz6&tUN7ct> zN%LlMuhy$X`CYVd^{^^l44s-YuHK?!BLW80pfi@LjA$ZN&d6=5IWH}!0oU|K755I0`{@m$E zzTOj5gx?)Qz<-33|65i2TC2OP`*^XyP&I*SedWX-(U)~iqPL3<+Yw%roH%_=}N}QvdA>C~~ zTX6J9#YwBwu!bpwdEdS#&4l0)DrvTM!iT+^JEut|@s@^~VAqDq_jkG?6-9A+lPeM~ z@9!JPI`Zas%PqmX{??jC+6w4gz?;8Cq8$u2(CFcTqh+WcDI#~9ryaa1V@1(LmFO(g z9Ccdn*sX_gk!`L-IGj^48-`Tg81~tb{_>jBhz5p0B8JNr9)B66ZJ%&qA=YAcvTkFB zU)4avlFbNFLqns%D#JaT^vRpzvfY0UbX3pBXX};3)e<)S^1iCm`WpucJJ@`X-{`s2 zQ0cVnxL;q>KI(%8gFI#akuxdu>ibmjb%U$XI)QFj=xJPaavSMx?A?^upwu?wP%^!z 
znKLykjtjgvzc-{VBJ56V$q^&1yXWMkBf(SGYgY*RNR$4sef`7U0_sVrqrlmVvTWR_ zTh_q=u+wW4R&&_cSi}0`;>o$6lMZ7B)w`stQGKtZcLM}kr|-{97WIdQ=KQ?4(eZ?# zRsD6~SZA|adObc@ogpRp_9;#CyOR>jOG?`}E8o{48;EmACsLc&f_UhC@p#A$>XvNv zWhk=C#(ECRU0^sQQ^Hzei=ScL*CS35PQNSJioNJVLSB1FRx2+W*M=JM3Mx02A{DtKwYy1Z}S9$kS5b89`MZ z1(NBz28Z~}hgJFxzL3k`4^EgERzt(KFYGtky=eKUzscjt+D;>}_lKPxX^CzxuF1tq z?SEvUhAhURv(?pnb*}NjuH83(epwupc4^i1_JG@mG50I@%@4^^BNsj=wW`>~+d0gk z-Q_3I?%cG+Z)`)oa1D*+(vAN+%Fmcgxi>FLI_{bASH)nM5-BMrN}qYb*B)=rD-0M| zb(16go6~gsLc5Pm$Bwtw8(j1(BIn5!)cLpNphKQ}CZr`; z|KJa7q0aAMFS#RBN84YBnnx@^Z&_xsq4WbCm)~E1Ml8KNwC^H_FVTscA}6rSO!{kf zQrnpN(faG#p{?O%B7LW`L+t+2^aqi}%&}XN%GX!olpCVQGj0uJOf*U+=r+G@fn0yF zhfSI)=40P(d6$vK%Fwm$HUIeCmSPiT&_Vkot4{r!^c705m( zG}j+=K6xw&ouhU2$^G$R-zuU}Vf*e&S9(#1Ba!KG*0enYDNnX?vqF|$xP>r^_otzj z*K#v5@B_4+N=-iXVc4|G4< zOU6y!v+kAR=)r{m2lO5F74;URe^qKA;~51X;+~$8NrKzp;kd$|)7`|?4?3 z6d>mcePMVidqM28U&-_4?)ng0lCSmrs)fK>uQOd|%ic_ul5mD`fiKtiet#99fUeus+=N(H88Itz`GW9Rj@}WJ;dAp_}@Qu4Rm|d%Hj)LVHAvGYZgRT+~2zf~m zqRin3Df^rt^KQ|`tFYb0KCz>uxa>I{?7KzsJ7|Q%Q%R}AC36nqMv`<-B$3AFjU<`~ z&QE9vEW9%Q%m|J-!VDY;Q-vaJ=3+%$!VRPO8wUF_HCLYkxzq{+4RFFwD>K?rnl}vo zU=el`P#J+y)^8~acO8p1f53^j5BSwHI@|uQv9LJTMCCyPeVTqO$Ayo3G;-aCXDbP- zYPj2?rv3_lJQI`-|G=oVT!!u{5pdj8p6T)D`<{#KgoY#G*N^q}Le*;*kBV05$%P)u zV`W5L>092#XTR1$vm*g8jX;zl)U}}=<%u@U^r)sk^__hWb6h|ZavTcMw{@>>)bvD0A40&2 z>9qa`xGgA_w*+U!A6GtV(;009*fFgLY(OIA@$O*j?pQLaN}t2W?!p>q*@chi5E z{CemT5kIln3ju zjnRQv^4qI*XvOo^hLt{VZ-mF+J$q9+Srvy=9U*e197gj|!utQ;WWDVFxo7hX23NdB zPr%a!MVgFS1WlR!enwFX<~jy>A$#D-o2B|NkUFcl;h{UWNl54B&c&Escs9S>Br(3Ig3o$a*GoN8(vNrQIv6YcaOZ zjClIeBaI>vS)|+CSV2EnCf5IRahILlE;np+5;m$Pb9+6mAFhhbd%t7PH}MXdm-r`~roiz5}Xq(ou zFHnxLk#l9=d}713ck!xxqNq}}Xstvg;-J9EpuVX*-N1=G(&w26WGM<0^E=ia7HyDZ zPwByZFp-7b-yG+8`CS@mYtDVlu7_#p0X|l>y2bPttY?|s`VM=?yAs_a0IYe(ssM?U z-btAwapIxz-dd;f$}c6#XO+wa4}?(mSb7s|=*6Xfh);H9@3^J*MIz1dI83U-^MNXg ziJ~b%96&uvMcP252-D*gINdI!Qw>HwQM0ZzKE1%3QS7-CglTHbxT$zr1NA8poBjE9 z@fYni6dQLU=D+h%-nixKoa-LCx^RcWuwI}F-vm7B-nV2`%K?o(gCVfQZ|uubBNMP< 
z=ydv>Wu=kbnW0w|c)jN-wPX$xKNIZsq4&nOyT&)GDV7l^YCwu^uG>RIm=}5R8r*~! zrF;N%!gFzrOHv}3Q-x|&GiYDsJf}k{!Y(_qBC38#?NBWv@t1tn9RdSD|KFHXWkyv*su(M)&QyAD|Do!ig}4N1DbnNV|zi3t*;Nc@#%k7KnhMU7J!!9qpcW zVkp)5MBz|Plq``vCKj#2NB`={OEzmW~M(PlJH|@>fKboIu(gP%=7V)U645vHi z2ZGU_Hf42ER&(Kz`oKZ5@^dz9(=W!gF}_Z+3*ZeC`@dK&t-;jSCM%DTe7M=2_^2q5 zYW{w5n0cqu$zE~e2c(oNQdCkvpk@#xlRzBaSB5YTXa)cpe>=cYu)0m2wP3?HcLLG@ zic^gSm9T?edc!ggdSvPQYeFPJ+H2hdUrySk49wKpau@99?h9PLkbyVoLE6^&ayeOY zT+}aT7RvYaDpTzLEe9DFb)P5;{{mx1@FZUpfK^|O@(^iRUw2CdZS+7N15Gd#r+1&}3N`7tbIJBRUkH6`*+0NVulie+CqVgrdb} z?`Knoq7U5wE9ERmxX!zFNj*z;=X<0T_7$YKQuc*`z55Ap!pCX5ms`kt-8+;1TXkv- zzk6k39GN18&w}Je;Ra*9z@7hMH#Z$n7`wTIPBAXji)yIH-O}0v>+WADA56L%F$YpG zP!;*h;bE-Cip?(Dn1Tff4*8zuCoA=aEU4eQbqdYtwQvL&xI<*L54eNG|J5b$$B$U> z(vse809XkDdJOv9_?afP9kcqi`9)`r;{;m?(HP8SJ>LHI!0M)dRdXAe8}EnlFx-7L znX2%mDBSdunP>K`c?VBJSZ`prS&-z~)^pPT-%a2vRymR&6>e-#rh!LzoC-n_R%BQ1{@*?M7Awsy%<@mt3i z#a}jh1hYGYIB?T*C+Z5?p2ZL2px>#i0>Tx~GHq7D-3}vF&<1A1+v z@qHLMgRzoF@Vh0@Ek6p+U$o|puDKGVkGi*Hs(tIiu1Ak=+t4Bz8WcY}>__s9 zj^v=KOl&%wGj%U}@#K;yxy@Fi#-jm1dZu zP7tpsmCTC*{oJ#0oe@l&r-%z0dET8bfN%M-#$3(>LeSb`R8+v(@`@KV5wjYz{Ebi4E84><+&;zOD)5QGN<=0S~QL?0cTSg zQEOh6``&PHVR_#Z-Go|p+6pj@i|LY-K3I?bW6EzLr8YbXmVYfd_GEc=db}J5s+ndl zQGaj@f3KdCt|3)FV^;@yX@@vI(bjo4p|~kS%fIZtmlhHU7!2I)>hq1>E*21H6x4Vh zIb#6*qcR|Fd&Q=^CF4EVjh(nINqU|X@lD7>|1k^PG!?@rH>vs}xXK2<250vc6#9Aj zU35__*c0CgzNE#_ij?p=*=e*zf%~=)CXH6s6ecssG+OVZduN3Ue>GLzMZ|?{N!-Wh z+=Hs80j~%@QYrDt4FcnY$_Q+swWOMze(MKb);HZz{mz&lhiSEi7@+}Mfw)ka<2N8a z$kWmB8M#GurKE8EDp^>|A>xMZz?_H}0Y1KW?mC%I+aYyVihVF85PjMsg3|rY*HzC{ zv)h)`<&C&6pIO6v$G_QxDRBvU@gbo&_3DjJho9f(BR$djL=#<2;rn z$PqZBUZo*shL?F6Ob$F!Et!qJBz@8v=H^rh^lj3WYPo>s2lXD_n22fnRP~nr^3}_f z8>g03uU6dAcbXSJ%1`r-JVd-Xn`i0|Cf(%$Wr?cPF9A-rlAK#D3~40#J#tM{D%-d{_mI8=pHDHs^6(G7rR6!Y0Ip>v3^51 zQvX|3%;$ifpSz^7*ZR8m-4MUl{GLc7vZWYfHAfzu!Tm7BsH9^3e!LE@YTZrWEnc!N zzRG><$Xg;jm*`t7n~H}iK^+Kwy}iaZfMh&+yu#rwr70LsWMD1>-;ZAtG@00p>cszW zMKtKNdQ3~qQ`>tmgKA432OauvRgrrzx$L`Qa^q~`b`)w(Hss>1hG*V;s7nu#7D}gk 
zbGE^}1cQ@uOcFlRhP_PsQGNw6n?#X|@94Q5!>S;%-!nk+ zF9FnrpYGhXjfUbEhictx>f?#yo|(Vr7IcZ-2BVT3mObr9FUEq`8Z;JSYf(`_Z3>uE zeANyzRlHm=BRz~R55BG{)N0|;T+Ntr(Gqvm$5{eyj~j=O>DSFic{HR7Gie?g{`Rf$}}gpr?Sztrt=)$%}!L_aNG7MYha-Nh^)C zX#US#w5FyU;YS6{^L=iWMPgY^@ z72OiN2oI0|V&Y5_P|745q0BdD;Je8P_LzEKCJnep$WJu^LmvFM>ZULze8PQU1!#6Zo-Lx;j)$pYl&Ef$)F`X<7kh6$(dJsX7)tTF zJCikaE5xab73Py00?5__letlK)-;#{t*Z6&US%-c(iz8DlaNk$MX6rWPdvh(Xxg|? zSh2^8aOX{Mmi%VXYhTS8PaiG^`7??xVBc4tg)y49ApR@0DF(NL?)nnTT0gvr#M1v$ znL9qKp)vm#rz|Zp6#HdVIIs?A%GD|AJ=3zR4U|o0*13+K-&s!y4;Eg$yQczZ!nO-8 z=2qKSsp(IrFkPK?_QoESZFca+qjWQ#PWUp1!L42cSPxaMb6NLcdy8 zQ{R~9M0+ePG^o&=c$C;}@E(C6_Ti0#sm_J;x$IHDtV)+C86uyLJAAej;6Cx793QG9 zD}F*6I3p}R-*9Mx+6ZB{ zliL&1cT}Qb?ZD0NT&-f^n2AFHxcDOe%KF5BA1l&>AN7?UFWt$kv+rEqUDO_((?XCBLIH>bXB)H`Nb%QS4@0k1zh% zbh8mgU#)bG_Bk??1S;U_ukh#nAG$H$gz5y~x^+I{rQh6{?hVgslcz@0l@f^@FChO9 ze=^x2R~6~@UD;^U3!9?CiNwT6Pe(61QbKcPa49luOeW2=Th4j1n9j-s~^c zn)$t zuTUotq;JYBi-)7GMa30DS85w{j=UGm#zKEfdX0!DKrsJ`H{9Io&yTgYYoEey9ocU*x0sW( z{b9{QgVQb7yi?-fv#&uWf~PNKZ*fG8R!o0acm(bL*LX!w92s4z`U+nPd?wJUFX6&n}ukbY~-m8V93oe;` zkq_5gGMTc?K%Wee`zem@`g!EIreFR`gYkR1@@Se=D9Vsu3;@;Irl{4&(05v?f2UoA z);gfqy@il>IT?9@6tg0i8NR5euJ={!>!FWJ#h3}tt0jH4X~>&;uPSL zm1p}GjUxgpYaaCA*3x!}9f+<8auI(GpI-g#@z{&qAae`&y5yRp^NsWjRwqRGmJjSWUZnI6D7&CT>!v$|t z*wd@;ns;9?@_!+_P(N`yKI{q*;c)!y{r{M1ME07A;FPp6tpv;E7POsZO^upVRJ z@kDr)u_$9(EJM7 zJ!JeQP5P}dL2YqJR(v89P@R3vwKNP4!!m7g@j=L6)^HdUQa|$@sdX1kACtZEkbH}H zf=`hfSJQT=CkFrhY2oXSYkM_0=vjx~JtBYW{%2L>T|#^w#0Am zk=hWWO6XR;_hEK@?p{`}rq?#}d3RG-zSDE630M4-ECJ>d?TZrx28(CpqILQmz0_$j z)Og-FwHN4j#P({#H5Wt!{^~D6%5*KL=Qh-c&y3eR!q)z%9@~gN)Y2p-RS$bBeQxjF@7njwLy0-hYs5X9US%Sugt z%iMqGmqRBUm}FcEV|m^ZzomzK;;K1Lfdnxzw+LdsbYgvqWVxeP{AOg?#cqE{CIPc3 za%fxMegS%iE7fEp{){!98XLjA)Ep_$**q4bl+hwUgci9nDhb#ES@*xp-p@)7#_I{B zt6S85U$tewXPx_s6J2+B&t;IqDW96US#XE1E8U$Qs+3XKJPiu+qzqseuMTIv*A3LYC#z(r;g6%sOih zzHRp?cVWp>qSni4i!n!OlkzrF<(3S&i6+_44By*yyWhJh{uABf;ib^ z=pfPb>8T6l1I_vWtuniaMp`LF4i3_;m?D?gF2S1>PQ$&Z$D;&-iW(|mYx51NIZ1C_ 
zDGAbgP+%c-z3MN)&snVQ?$pA}Im{$-2PbVg;pV~}^pA-b`mXj@C)DMqsFAc;J5?Na z0o{+&op~195%}fx?BGyX6+6Z#a&OFA*C7n-DDboYB+#SyNSY_`3Np-d?6Qz58MoIa zz-YR@1I5*7z;dlyD{j+3!2>m9sUXiQ&e%%2R@nN0H65of zuG?;Vj9jPARYcREQ>x^+utXZ-pSC|2@k84Ux7F0QK@r-dMAW{#n<8aqO_-Bmw?O(_ zJyi>AB|xK4Ug80|1yv}|UK2yYc$L1SWUbgJTo@z??y-p5GG|>v-i|Sk&F^H~=!nst zkI^}@eI#aBC_$B90^B5pXfj>n4b^Yg2M6Lsxpf=8a0H;BDPOZ?>i=r)WVQ&$d>kyy5yyoo+U(`E@#J9G0Kxa)J+7U4cq7%-yW-XLVQ=a@$>Mty)>oiuyD^#dA z9~PcK7jRnmdCE`H%dJZ*=A+zGCNJdM_atqK#D_=rMegA+~r`x&V8 z6P<@O7F9O1LR$~cN{VTO@u7$-50EGdowO%119Yc~2$pH;RQ-w4JzW48^O^wvi1DOT z>Z1ZJjr-}o?%Rl#J59aOzGX`VBaA|(`Bb8Nx1iz^xdxTkezAa@x2Mry}v*& z7MQtPXyCHewy5;S1}5%xK#in~B!GK+2&SDy59aFl5zO_IV`-B6)#XJ{$?KtgQVgb` z`eXsdZq(NjOhe^^Z_jrPOw!3>A7f&4Aejw6pPoPp8r6sBzg6Dvy%x#M7;Sb!anh~H zS-9U44^|2^|4u}aLWF~O~J#dU|Kje zJ=f!TTqS1Y7-vLtsY)Pu;Pz28AGfl5w1_;rxRsRFHG4_$Jjw9s*_UY--;jbV4?66( z+?$YiZo0YD%Vp0BY@Gg3uWPU+0EUk@Yt7X)nmXT9`o=4@K>h~|XIF>}1 zEuT9|MZJ;8gLHYJie^c%fANDZNoW+M<^Zv#$ zzqRb58)2Ryc?&_jAs)DKQWm-*Qv)sUr`EQW_)h2bQM;~DEXxo2W$ZNDiw+8FB4 z0XyPl9F;^|NYu8Ee@wUc@Gy9ZoG6Vc>d7#984X9qNZ`*a>{_8%Tlv17$kR)7Vt;nE znQDG5j7j~Jo88o0k~LT^@W8AON;Gc~j%98k@;SV&?HF4hFYr$9+#3SvTewoxPq(FT9Rgg(p!+4q({gGR~;-dovMj z05+Az6U>I1=lB{KZ>7I|#=@nymzg$t224@9qSJN<;6WfFHK8;38{4^WsY;ho*8 zVoN(nTiP|9?A-PHm#T3`7y84ppHlKjA-U=x`+y=|f0LC2$!{MC#T^i&o%&VOKILU0C3QAxtexK0OuqA}0;Hwxl-7p25=WkuTo}FCstQcHP@2&& zH!SaagYNnL)9*YYim{sl&tV=tQDc1CI!(;E3j`u(hcZsJ-d*qyP5nH-J7E*Vo`OZr zG&hM?np(@*(RIr1yE?aD!)G^ut-pO1wf`IwQ;pD(?;L9T`o4}j) zhJ3zH^wGZVs**yE@XOUCo%QkTIc0H8*b9C_47P`{y`?9L8@!9(TgmZ4x{PD z3VHWP>C&YW8Bxy=FO_!j#5bL`3^Yl4yVq>Gv0}B(nXikHOL$kA14*0p&hT&`d$XtfzPa^~!n7F~uo-(?TI7=B0@C&y26f zxxFWrzwY>f`#Am1*GsFVVqJ2fsQYy_KSgp^{N=95UlLc?yH?*?L~~SO(LY|Zit$1Y z#AIr@=-rRg?>wIHzbTxGx46uo!^+HYJ+wuxJ_awY9X+zORNL{lf~my0f6>-$E~`FiffleWF#nKebt zjHC!@g3UTI(0FOuH(k1R#>GFAxM^h1M7DUaFd>{MNM9Zr+GW*uIjbw>7KdE+%eSBm z8v0WFkW)L}pobmtV>fHmJ2=dwu&%t0NF&-SUw=y@e5LA%d47?4j&G1j%_-nI;Hk9( zucuhI*g&KR5^1P@`9K`bBlj%TzdSb3n`ac=5N}mmK$)zGt&)2D)_Qe^4{hTuXAmB6 
z@T#ux6w~wJ$jz{_GDTEa!|oDkgu;QFO}Smo>$x#EvDBe663F5&lputw8)=TcKZp_N zcv%~&vZ0oErl!6&d%em!$NSbm5_u2jC9n4=SJ~9ucCLwT@-q8Va6tV(xS6Sm@W#mW z_?w-Z;-mFudD+{%$T{TktLyWe5turtq5OUGhsjIgUi02VfVx{0>z!UM`?umu`=Dee z6GUHDwuUsrUZAs5lwvCTTdvAFeY1nidm}CJAA+8xnHU~)HBxJtSx05G`d_anzrB3D zNsFPK=!VitZ8sNNvG5P}3eatol=_aYe|Z`ohK`K10~Vtoh6LWk;G~%CExdFyzo!wV zi}I)?uG(5@)Y`n!`{*m>5k4+wrpNSYAy`v?(7SG<)DX=YjRa(^z#KgsiTGKqEmwP| zKlzNkK)Nqq0d2w?G{6Zzn`pydOef$RrT@RIKZ0yJbxdE+UV0NWOd2#9r7JwPuf(fJ z;h(x+`ESoEq?Wjlx8$9g8dV(heca!IrPu1)dOTB}mHHM+dqzF_m*98N>>-}o$5aC) z&5;X5T}vYk-KlU4o{XUBLtdO|!5Z6}!h`Dn)v~cPKT6g&Bqrlt<0<_5K6y=`BCQsT za%Q&5UROl7Fm$`!`rPR{<*Co*Msr_7Er$%(i+wY-S!g8?=oI&b4ajDgZO!}o-7&^7 zF4<;8sjOvt7LM?Dx(yrZ%iM|1`)%toQaC{DCc%E<0V!#!j$(do#M;~w{3L<%0!(Ld8E=@5XA3zES%aFG zqLV-IJh;gb-r(6e4H6Zl3Of}QTj=E&%KyW9RT|MBPjleNJ?gK4@ZxB*Zx$+`?Cc*#~YJ@ufe5g3= z6Xj<<(cHcI-|2TGaW?aVGJFAUS4Rk-cAbBRIwr4g%e2$I?aXtJxfps%UbFw;six8( zA?4+B7G?eL?2$IL*MtvD9i)ZZ;t)@7nssQdP_&fbQpDH2NhAj$VeEk&bv-;Vi^(@dJXgEr=(d1dBfaK=DXC(_*SZUjbC zDFG!qMQAV~o^d#I&$%RscwmAdy8%<{xaj8`!ta3=SnV4{gA%~JT5>9AeUCit9(Gs4 z`~VDC#b^L_nxwC7dF&J*rXl4Vuo35xH>qCO@3ne>`$>8A%kL=*hj@rhiT(xS@MH<# zI;xwEX5W-#UR`FTn5O5~DQy#07LEFil{P`D)FXIUGjG$&5aVi8DC~%aT4Lv$)BW!! 
zqdZi29$FTKYimzDiUOv}?L$N4p$p}Vac>vY^0ub_f-lkf0Y{&y6(<>{qKPJ`LhJyW z4n)rs$2HH{#(b|)7ewTR27XREQ3JwIMs~AiDoHK{rrK>bXaOEOc#dGH%+uP>k9DYpk4f7=d<+x2HvQ={mGSRbP>D334{ zBZP_$`;2Nn3zYd@gS(Z$T8koezS{vW_Hosk_qwrhe@y-_fn5X3LHeDTK6lExZmVW>KFGWXy$zFJ2MlELB< zPdQ*;tubq?qC=B!pmpJ8iwpB5PPgm>;-aImSc#pHi=9jVdwr==BT>wmXG)C$_b|gp zsw$CYNRFx=*`LK0D0lalMN}akfW31R=7~pV?PN`&8O!&tJjz#_HCt`2{@^<82jxZ& zCI+)qr@~Mzgg@%|O}#H)h8XM^G!^x?&P!nrfMmB;?6Ced^7f9jg*%C)6fOHdmQuIILQW5q=Zo8A6S-CZJaKE~su`)x%q5Rn z*!SRg_syMtIWLA)Prk^G+*DIlrb@gk@|TjX={yLI1l&4AB41ffyOqzgQwK9V8l~y;6E^o9s zRK6PbjH^zPz9prcyZNxcnfX0XbyJ+U^5k)1O`a)pr`1l{Q%>7vIq1~d^zA(KjR*R> zEcyjzNW$z4zp+r?3Q>R#>B7mOZO47L>i07OkQAD1s))Pd z7m9O-lH|m0#^_5La&G>{Gwc?>CKbpC<4~rKLluE*-HLj`5*y$X^Z8!sMytD^$!axN z3e0FP$u_%Z5>24U0l-tbb0L^AM;X9}1KR;2tu){dVF%UMFRv_aOVPRX-yaV;0U;!q z8aOIUf1>78TfU!T;2XVQI)^zQKsI1Mi5GD13%j9xi;%Tun}8~l%ZHvEn=!zxAUJ99 z8^jVap|kJhy#nuzsV?K5E2#RUolII#zC*WZ`}wwkR`#K2I58=B`mX@ts~jb8-9^@4 z#as2H1*JAqI2Am?bA4X7z2;QvLcoLL16L*yB1}3qgO~m{Fl`!~UD18SUKRCmhu&nQ z`gh8K&Pk(7W+=_GOmFH|`pVOZ;&=2538a^bpEVBUKfA=eeC@wgy8!hOv{um_JLCoA zY&Z(aLc>BXe@HQf{I4FgZCJIs{0_~0I-|tn&L3X@sRJ!2NEx5%;wCcw%DDo z^~*%d@VEWx)*3}!EnS>jeox=TjU_FPr$`yb*n&BalbjJ33-cu6Jo8$u%4P`|(n*G0 zO?Jd6K0WwalYKE$HAEG78gnjizC6)pAJB!!1d*m`vj!D>)DG#tk$PCPsiM9fKe0Zm1)uzhhhj7Df6?a)VK zDtliMtD)y$T&)?x6ym*ak+UgvHeB%=Q9^f<;hCBNi}y1X1K_`)hed~BB`$w{6z{P4 z$k9vtwpCNNjJ&zdbhp6nPNm7N&+mN?hNK%BeQvtl?(wPew0XqZZ2z)FQu4~vze>fq z=I#F89xg#2d;owIq;)AW&5C~wl9cSD@Qc8$+T;)KVnMUJwAJ+LCDqfp^ooNecIokH zG5V(p#3ClwUB?t81bFgN+3$7H=dYLtm1o=cABVV`v{K*AD^jlOc3$up z1wF8p)ld{OqS#w~yPTX*x~E}@jo55?FD!J)e%N0~$(q$5qx>1!(ufXd^VMX|Ru^^T z#})ZarL~xHU;Rh=(gNW$MHM-afTdB8@jIoJJ`~0Ij5rDdWcte0@DM?tUu``(=;N|) zaR{&kwln)@aaJ9}?FDX69-=1=9vimn&Sb1mSTbDmaG7QY;SU{m^>Ju{1K1zA>4P7- z4)&d7l4d}nx?SYOm5j9k@*uJd7(G)0Tli{C&3gr24^5?!!ZCMj+S8p?LFbFGx><)g zDLg4mh*xW`!N#G{%P#pYxeXQC5Pg zK2hpo!sy9ZNn|q#REliDhm&LVLW!4pxRHQ~or(eE(@=k1#i{e$<}75I@oD?F=l@B`cOq zk-ug+gi7U6DyYziZD#gH;1Oo)`=s{U6NlTxK`#AUzQ+pl@k3RS<>yDTGjUBx@$vv3 
zgDz8$Zdvlal9K!9u2s-gyJS~xDeDDkH*jz|`T(loXY*V!l`R*_vqh#GA3C3>B-5O# z>Y5uzj(CmAaYQ^|UfFY2qqOftA@5hZ_VB_-X0tF~=^APIcb<9z{e&VyzRuzP!2Fs= zaiG+8Q)>f=!li|wiiL%%)efO&)R4LaXAd9X+FSD5rh1$9=32#DN}d-A)lZ|w@zWv% zeyI7Y6y}HT$WPY4N?X{0KriER8C3te81t;fnAfAt66aU7g%rM{H9IuRD61x4A*bF- zJhT-(;W*>@xs9ZZ2hfT%np`JpI@f)?GG*aRo0Z(O4B8M66P`|ZDs8@mTF9C|6poI5 zMpCrY2{Vm%+8wqE3`h1Y-&+QP2wEcJwKPWi$(hn~pWcN_yp2R6?kq0y-c*|Awg6<> z0mV8CX!Z}OlX{&UyT{yHY8067#VsHGl!yw7liboUT=7W**}F+b0^Rsyz#JVkD;vQ*58~>$(wmmjMdPEhPHaLe>UI`!<%~dG*D>>ZGQfJAhvc550t5@aO(7=sA0ZhMF+oSZ`$y78^?*R+DPPsi#J|6A!xXYjtV`b5PX z?0@;d+4KV?&~(?5B<9RR6VnN`!ak_X;Te0mjMP>;aWNU;+EfQVHhZKo%lZHC^xa`i zop0D!+foY{Euw%x+M=SO-~d?(`B|!{Ah;MZk^&-P1Z2rfj-!CgR2(QH5fKm>0ogOk zipYo(AwYlt0V0Gz0!av2{oa1pcYXiKWeDVO&htLc{oG@%uqIKvNFf55=g<+H6n>qL zNe~@CA3lc>S#g;vn)G}*O?!r7HL!o0FNwcg?&85?N7<((KL!S&+XQ7&4U>AFn~=`E#cF5olzAy z?5J7qd0XYAP7qrA8k3&CAc;U)&pSo}a*Q(|pK)UUt>mJimYPfRp}gh!^@ zQy;#lt=gKxN0o0e-~Ig11q*q#-Zg3L9X1Mm(+1kkZOVJqQK*!feTIas!mz9n%tq+jg?KoLdNPORSi4ce}}o=)aRba&afp; zZmcXUpbceq_#WjBMX{~^P4v$>=TqpIgq2!T)#$gz;rhc#pt9{Jw)#)M zx3XqX-MABsrQwSmO>Q)OtATK8=)!x>gO6|9zLyB53F;nRljD2Vqq4O7+m2K90uYfr z(k)%#>MaH#%>Mq&_ye_MfG-_#-L3gl;R-16X5~BSnHaZFU<=aeBiJ%sWf^eD>?x6y z%Xtwu@D4#&C48!ESRn(@+RN1~1zIAjrLgKq$YwLb45!O>$56cH3mb!3Ff5|>h<{#D-9_m;x5A^`^}#v!Fc`xj4Pj4RZW*;-g-p0 zzEm6hSH*2Ge@GP1t{U#y%qWWKk3aXXN;FP&&0INodmvI-#iwlz-CTSK>A3d$r)tD$ zT@STcFmO(MVr*-?&L}m7$v!^n0&u6iEA~zj=+Y0(!VigAfvVaZ zz#|h?3)eip)ieVk(M%t_mqbUdDPzdH;iqQy*9MCj*u!AiKuZl}2_*OLOo{*em zk}&yV_8bJMq7}&K*>!eioOBt$?WORQpXYd@92>O}pA2e@L|3c~@-TK%Kcf;LJ6R0xii^Ll3IwU3;VRaT^Yw>pfF?6B&K1`g zP{NU~DFM+sWzhs0Hu@Z}R<=9!YJ4?;!TjteRK!%YAfxuvIoR`L95hXM+E*DW9t!yR z^kyL(^X13L^{6 zhmyjG;0ijf2@I%H_w+8%q7WxSp^C;P*@=nf?B!*d{s}duW{rbXj*#^@xSmfO3lA2YH|;HiyNDy z9SwIL+*>=?vNtdwIP>wxX6^4ErSVj`G#&C3r8w(hBXL*K{%G&^THES0s(*Bb5nInh z_bg)H$-gt*-Dk2pOK~^dx(=D=ko8O*yt37fl>Xhn_t{h{xv~QnV%}wJTIvsZbog9q z#wu2JrF7h=?>OY7wUEq+g=~#Jr0h59KB}UvuqOU*fm!d-KODlz-16$t+29x7Z(1Q9 zvznE%%~@*uD?U8QwFqT$Np>s>ev{TJquV(HuO$gpAB!+J-8^#v#qKuDakxyC|E*%V 
z`B%{%@q2USpZsMEVdl^vQT1@U8`Ur3ypQl=b1gIYONY_5l|teK%pvKB`{b{E{1<4K zr@i{=aXzIg?gox^2(Y6+Nf@a#pM zX#d?P_~p93rNM???0QA&)Dw@k)@e(5z)Wgl!*Gj}E&aw8__dlEgql7oEpH-jGw*X$ z?3l)VP3_C|^F37jc);g~_d;@G>E+!WSLeLm5pB3ymB-v~IS-8!wR{ahM)8-u$?m{p zz^o8#?YA#n!zbAtq25;FJWUGHT)6)3#eZiFWk=f|I_{vUcRWN=!xGzCCt$C)3pex~cIf|T?N^Vs325MAdT4Sj_7(c9QV+h!JGR`9o|M@t}vl)&2rQQMlGMbz>V;uck zcXW2|qUquYs?=ez$wE<>FytFNR=-$}`n7Cp_1l}0`g^?n8z+V4M$Exu>F6v-X|w|olJ{*_q56~*Tf}}{tHQF+~F&5f6ZsM zSi43&+Hm_~asSv=|6FJ4THj<)>Pw>C(_ROkqd;8_-SmxWodnU8?bEl)G8qwsKGU7i z4xO1SMdsI-`5CT4_iMNicw&Cdh4}%N`Pnk8wU>3)!^prI8IZ2lq@Q4vKewe4Xk6EL z)x&e^T^ct1cfIUFB9-9;zT8-3`bYn`b~Gyd-Z%Tu_f#u+EMsk5FZxBNmwr+&KPF)4 zhATM%*uEY(g^4?KjNrPIvp6XuHTk`hBX17*SYsF>#V!05F1*_9?|9Q4pL>x&Eo$&1 z3gcv>G+!m8*92K~5D#RXADrvN`0Ej0fE{HITaFc%&V=0mt>ppDvYtz;i01K>r|@Uu zy(MvEhncwEeiI#X+ZPd!gn^(>ZKU>PtIkl{-%IRVR&ePd%yGrrC#s3Kq*INPX;# zgBRZvTTq;lv=LeY8c{tQzZaHeI=XV07s^200svWDoWX75>x=F!7516G{;OioUIi8w z1IOuw_>lO0f+eZfzbd|s+3cRFSa6J1SS<(#Y^imEKcxvRl(wW#BG7>XEV8(PJV)F& zmAzpSnvw^fa_dz1BO}gF_MVR#<&pChn>t!t?9F;_SC-3fYG2BF=>P6>`HHCtB{C2^ zGX@kAvYK83EHWi-Ge)VD`bguRG+XClzYTaXnT^g9M0JNLUVc+XEyqHlR-l|9T9Tiy zfHBa<_66#XA0d#m8oH38czjn9?0yS*h^C!)Q&cVrsCbSok7{MQMJ)>LISBu}%TLcW z{EPykx#tBxD?{E<%_bG#)BoSlzUb?Xk@Fj!1eO;b#SsXpg|Sh|%h|i5ec-FW_LM{> zqgw=BF|GY&bbIp=&;b^C3>DFR#aR$O>3m68IWRBzGOj-DQT39%cpOLoRe2g9InlG|S3=v|{dEwwQ ziIlay-A}&xmiio9{EB@0vfxFFLsWE>lAh2pEs8onLGv^f`}aoJCz^%s!1}Eo4gBZn#lx?G~Sfu8{&S`8Lx|z8^3f|jBa_);Y$PRB~jwi7~htZ-4C6Y1N89G z)VkxuTEX`g#=y1+_}K$y$6%}BpAtgp2vz;!{Eb)zaSD%5B3p4k$Wict2Dh<;k?+QC zrZ%5%3^W;QE8$j1qfpSN=uE`;b19`!Ng*#%7en4-k_LN2;c5Ok>wvT*ShxkvElHzj z=wg;(w)8sNjd8d(`j+otJl8hL$x~G|V}$0l_Sv9z4YO1w_=DQ1+>vvx?b-fFV~0#z z+UK>ep-dILmjDxTW6Vfg5HoqrQ}wnbd*Sh;4Z#vtlGKTCwbxEHUeUHgaVbe$daafGxm;SN>4kT0DW9%2^(2gMg>pys= zrD@WwY+F{3_K26x^Xn706y1k}e?6x;#YKq}=i}LB?s-QmySKxk;P6cQ zaM218E24{4lG=;9UbfuVd~*sFn)`rDw9<#RV~jfDx5G5Ub14s=1tfeb=yWeklujD? 
z>!D;$dwrhr7kr7j$D*%|F_uvW1guoQ_BP!BE0Nx2D*a|JEU9*ASy>hFn0|VxIhgjD z@HbcnEx+*b!Ip3z^f%u)F23Mo&7s@^*1p<$nwZmP`K%8NsrP)&TM52#m zEVQLEiI1J1!K>T-c%pSUZGm1J=q3xAS_elFM#B=b>G7Q2-hXCxx3kQEACQ+xprQoW z{0;%%w9c9G6zR~)t8){-*volOlbdY_BRMk*<608b0bQUrsD2 zs}*UQb$NR20~x3oo_kOz$^!#Xn&(gOapTb*2lgzOcb-rZ^H3f2UfIoW?6oQgX4~z= zF($ClE~>(giU8}IKDU^+iVC{ssfhTTdm~e#l@C$~{q&yrCvXo!k*F0&FG7UERKNz? znlb`C-WFcoetG?DQGGDi+oj38oZ)bl^f|Qm+m{mFn;|8oD7t@uU@-=eK@!VzA})YM z;z}4F;5DXy*HFHX$6OWOc$M_0ppN8I;yEh_hyv?&J@r@)GM4ZMR^+>OHs#DcQIB-f z2ROME4=G28yE2im4g=Hs`!=5*o~vtU;^sIVrBUL=EAPo;hyyb}KW!-1xL0zuJHzMi zF8_3!*Auz?pi-tHM=NpNJI6~J?*x(>qREe_+uMPS$UCv<9qH}-bBydMeb>J#&l<|E zu6+mf0(|^J&o1fmuKEyVt&7YPfxm`XWg^3=^>d8+xl1vXz3^29WHM@yhh0}*J;ibf z2ng~J*{{y&KAAHBZ0@)Hug3M@6V8MhCb2D7Ood)j>7N6d@Baxh{BmeYukefpQ1orb zvsXO{*60>+q8v{Hy0t2gSlbe@rr9y*O#}Syx>yQFjPBKSt z4VNFjgX|+&KHF-3JDRj^4U$c;3dnna(hpSFOW@4_lCze=D-&JTr;Gu274P7an@a|4 zLY2;~p@NPrb+Q0#novpnTWDMRSz?Vv)tvQ@R6kx09+FeC)3Ix5;X>(ZI0ltoyyZfv z)uqVR#B}2##+ZX!z2{1~_=YU4qA;Y~=SDkOwk|6XMlEfGtE2c;yhGQMKBZovuYg|% zEYeQ`Lb?@; zf4jTIA7^@*9F#JhdY#7_PKT8|8g!wArsrq-Rt{5Shk;DE%p{88-vs-mjgdDTDhy8M*_9Q3M(n?DrLX_%s z=lZql9X(O%-$>r(V_TaDt*EK}9kMGWY?Qh9rIXdudQsFkYz)ifivx4MzI{`+bC*_f zpt!>aU!6F~RV++LaO2<^*0c;$a&2l48SvyAWO2BnF|HwM3?jln&~@x}N*Lth@X%Lc zc!CtQHAEB?eu0tR&ycsd>jmmV-yNlcMrD4?v#}sk$2W((W_uSTyxKw4fi3*{c2+`> z6t*v9X*Uc3m!=AhG9Vb{oU%lEYBu5x-(Z)j-MYE){Buv-)jhzv{_* zo-)G>8|wD(YM%hr-7#xfhSp^XcT~B%CGgs`e4F5L&a%?@|F*-Ml+O58Y^NyQRk+yZ zXn$CNqLTRJD|L*Bh1WSw&-~_vbruGc*>!^ z(1x0bPpQowH$+3_W6aeS2`xA>QEx z*dVjWYvS^n+LXP07dxo>cL8Pq6pOl3&JQ=hKe&Fxg3XUpX8YDYSv`gOshd^;HyTs! 
zJi>OT*=r6@Kg1KCHc`J8|7afeoNe^^yQ6Y$79~}23~oc2UJ)rV#k&W+V=e+S9grzb z*6L?1-u`ry4%!ev&LtCj*_XLq%3h_aNa6MLoL4!hX!kyImw5T3KTVxtvH%t*S$Qa%ceQxVem(2pC2^xP_IPr&T{A-A`8QxnII&k#j z1h(_2Dk6YU@9aeKievO&`QS?Se4M}9koKeD_J^}cf96$?K*Toc=EhnL-VbT*Cl^fC zMD+i@a(S(-OPvw+VQljN?ii$}!1ifg{XT$nnXVVN>9f#iUf_$ zUXWcbF7%OT*-BuAKbU^77Ffsq_C->;=)9H5jB{hXt)}!CI0a>0e;%*p(de&*ZaUY) zyT-%UJ9zI}jDk(Ec05VkFEp;6pEZRr4XSey;49GU@J4XbOl5uDsfRBg z*9x>>_-ZoZeE5=pAl@DPsTZ7aw|m8xG(T0=T;}$sJZ*wLJ5AoH8n^-fxhC$9tft{w zaSxEX&ZSeN21p6&BS9d-Vb6Vzcb?v8IUKp;!dOo*$detgs%r!Nuah?0FoQxee_%RN zv?ureQIO3t9)WVQSucCkcHl_n-PQ&kOQ3&-M%JEPzb21+G4e;%>eF#KX^^tu+{kz z5x0;Zm$!2~a2~%N{lTL1&bKgF2`^>YP;e;oQOKjUP4|eqkrdO4SO!hHujJ6f$!i6w zssa@lU;nrwDzWm{)IPg)d(1rUSTsT?*T7v@rK;a?OD$DDZ-im;xIvV8V=|h`(IKd} zw;5|$3!mN;2~2;PCqa^&muYW9yD~@uf*z!G*S# zmbf%UiShyHw?q+e!nYBWwxy?aNy7W*~fie zlVTTyHr|_0Th`Vk>NuYTLMORp_i=QtbVrl&p-H#JsFRbuBS$3GPwvO^HMzt8xP|m*4dzTcrg-Y5J1GTHZ;lYVcmkv81fiXjk;%kAMAB7>xmz#HKm3$ot-N=%+ zk+Xe)>#mw!f2T)(BCB$Ox%e5j;d9%5Y;A?+)HW%qE*wv7whY;mv~(PzQOKYQiQWc6 z3t9CThE?e^0e*#yo}^5Zx|`o{MxWU`J%hosak1&)9o-IU86SqqMHnB@t-K_26->`oWh?7m2kadAa*sXG zt)}Et1wACv=HvM>@N|_u=q&Sk4^~L6#roRW7 z&Sq|?hG(pXb+()hdJs&2^KZR;oo-!nJ@^n{LHh@2oPqzF(jR`L29BT!EyW;ml9!40*>S@ixt97^cHrhE?@% zTZy&gf!I43KY@Mn^uH>XejOI6K`(nYll=%CmhX|!5F{@BEQIvpH%nf;#Gc^O0zgPW zTYd?t2KoxO9*Cjo{S_z8b=$y5hR+1Y$FT#;@2&BY}xCF*8EG1ykNZXt(E zR}yHAIRs^8k4R66de|THEWf1x&6(?5m@k#zjlz5$Mwh&GfKYh{fQTD1z&Ct&8~O<{ z?OX$YQ#hYo_*KPE7xuYn(Jr_`c5s27`@!v#r?_foHC7C4o0O~)^2_<}{J5{-gkf*a+n z#yMvJiI|aw3zteG*58IXh)WO)q0)d7xr8Px-*2Nt4Yq!qW+%SNr(h&%e3W_T_2+5y zBc&b$J~Y1r^x8qFs|uE66N65oNuw&d%zyG-jUQ)rpy*@@c$&C!fBp)4r30?Nz+Xgb z3hI*HdIT&pZ$AHu+Aejg^oo9k3+UMn6?Bj!>$LxR59WsMzbZOwa>rvom%F_hiV(H? z(%oyMMOA^m{AY{Vu8V|l*rR!!gqxH#j`BX?^JIDh0k+zIIWxL@e6HJ@Pu9 z7}^mtre!e4@Ehs8oRmk!2HB3;=gywg^0#I!HA3I z@_1lt-@)X5koqAal`aHDi5Z2P1U^5Cld_=~AcnS|tME)Lm90n<%s`}%R#NAsZdU-! 
zhZPq?H|;Fo6%ehs0`)W;Zw8Wx@Ovc~cRrL>6Q>d0f&aBu`Dw_ENfuBY6~-rQ-dtxZ zJpl)gv^H6Bb;6Dynq+0M!P(AT*&uK8x1xT>aaZpJZq0A}j#z@I&~QgnnTauoUQoCJ z{!~N3Qo)0c8AIC7XuwaQyoR<@_?IveBUUQpM@x3gUP+4JraMn@k}J|_38FWsIK>m# zdE-mk2oA;Ig|>>)|76(K`FNq~_kS%T^fZZEXmA=uYAU}W@t#5v;4lzQkJ=V75*ei0 zx$_l6VXXXPXKa$<2NVshA|g*S#q)}^ct6>0xqAzL_@#+C=KDp>9P&@mkN)Z+78Dby zk3gK)|0e$T;a7u`-6+P&tCc|uQ$bcz$+Z{4VHhC{vCqKrO)DTecTl%J!zP{|ZCZEU z23Aow-2f`^m~-5VwEdaPD`>aEnG5g&jP!?5;BWl_`et08FZi&n2fi#=*nmuMID*+? zLBNUF|7#$^;ma2Tp*RLpD5n%c7e3{d^kznROki9eudu#OmG?d@K38J{y|<`oCcpc5 z1Qp5MfJnWQ%^zn_q^W2n25@7ZQZ1g+iow}q=gmnX%SYsyC@z!1lndyCE%9`iiF4Ur z?kL$AEIT#7_6T;&c|6mtSA5EIX>muH-hy%IQqEW6&=Ojkk zAR{8=zyk_fVT>QtgP$;oGmvm4_&gLQsV{J_bMSUKUT||g)$^*nmh!ahi*%oxN8Q%i z1MMZ;W2a`>pQgY8P5B%f7tu(nJt_N;L88?oT!RTXaf7@BV&HX_p<;w0cD~N=|EE7< zV4lR>aU=P)|K^ZH+Zi*zO9&x!KUA8SuKqGy#-j7(jfQkS*pXu1t!)Fv6(0wkWJnpW zy*l?=N+Ht@f!sK!C8=MI`p7{ZX8>BOurhiv_pN5|B*gd@zwH0y3!ygm0A8||nwYZG zqxX4(KfXy^u&_Y4lpjIqu7+&80Uuf0s3J(R#=&D-Y^QVvjt=X)^Gw{mV(h^|C=tQa zZ^w=twKvjg<>^lcg@W?6Z3Zd!hn34U6Xc?Ma#r z>tL*lnSVSJgzd2IPkJb~DM8;DQ$7wC=UH5?YLM;A55QaLaRduOwz<>_9CniZF0_mq z4Vw;>?k$(dQsc|F+K^w$Dx{V|5tuoxR*aKE);&lFmlZEdL&5OXb5yXEH)K7T1GoS+u#T$CHR;tVmJKOEr}RJS zf<hl(o^IQI+~fd zTjL>LWRn4OHp2hI3|qF8qjkFeal7I&IGsU~B(_9&s_QFjl|h7|N92lTpUa zipKv}MOP3aIV)mG)f_-26us3!IFwn)DGm%HzV@>57>L(5ue6tU>CCV9yGkbRWvQ8>3l~)Gj z0nFjnMN4&!{i}9c65NJ)k)q5X1&5=(ym!VcD8Qo*myKqvsij1_i-8Go-EO|ke%s%o zhpqmsicXQo&63DART~ilxYFi9<#F!khf_o4zqO6UG=lYK%bi(0IKcESq77c`S$j-% zl*xX^`6TzeEBj-Pe7F5ZO`X;$vMlzX2GGvy&W-zYX>WK@x!Wh6Bp-p32M-A3Z3zX&? 
zhVN(Vk#&6NA4RBSxR6->W~2u`T2cHk;{`&Aytg6(VV{j8mFdvX+wJV)q}HY}DXHLsAY>c(OS@9%+%ZAuo|(#qqP8WI!@lHyRTAt)rqdJxMRle+Nft1}xO)rY za=Uaf^@eX0$BaMMtc;b4y({$a>=5;ltn*NN9spKl^C#8=2gyxTrrn!o!OLqgdFY0!_UlXaY z*Yv9;vRMLfcrh=FZtgNL{MubL1OEk^t;`20de9AQc2RNp;*ak^51gD)5oUh@xR@5` z(Nodjgs7NQ&?>Bns1Ea{Pd9zOUCg(0`Cb_nR9C|bGLMCabi5c=RZEBnZF04_F$2%C zeNUQ?<|@ByEfr(=*PGe!4Ba5&gKHrbWi>oImsObIlFGJ;>fB08A<%I?lDxV>qXnoL zOtwaIm~}@pAUK)g2OCQTus}7bOCx;7A}#)Sd35HatlcBy0Tw&WTIw6!kl0XWLc!^M z=tnCWS|esO@p5{ne`Ar>*tQ=b=xNERCoQ| zrEvStH2$S0skBM;L8S3+n~x$Us=6N_VJ#e>H3fb1)0n9<=Q{_-IymRWAq*}MYSLMB`e|KLjbkjUH*=3a6 z^saT8;cXhp`KnCm33ohgfRKcMgS_tmJsrAaVGv=Dx*>;5{#6;Quc17&=G|L8HOt~q zP=(=H`}B_?9`ovp!6=H1?!R33537e321D@itteAvecol&`<)kqD>T)=-O3}sViwe8 z2jh0abkfoqr&EyZ(~|TzR6RzN+aG~1*%Z+d%9N@tLT#3xYaWio`ZqSWN7ZXD5S&!C z*$l>J!sA~Uy;;dsz^{SMD(Q-}MqK`L-t#P;W=;vun5b5$8i-b{C>nF-3$9NNbfxbwC%ZS)+TOz#G{&*yB`+eAm^dkSf+BAf(d{f_R6!j_&eZJX~f_c%OTyQ$GP~F_SUJ87O*749<$?+mMgb0Aq{t725inH5$&pW+ZoBQrE?MoMSzxQeKVjI2a~yAmG5PR4;&_ zp~|d{HemTMpS8}Bqh3_JYV0>w1WF0B*xAk?4ncu&+g7~ed9i6OGUjp#u* z&IJ7!_ezAD#hy6)A)&u#RM(~lB8^rT&JWfe?tY{?p{zYM>Lf-e5dy1AJ;P09I~+! z2gToT&*BIYSv3_tnoSLH6ta;v@_wCS6wBouCY5E6)2$s^_rylv+KwS=76%BRvvU4O zt;sOnjoSazxQkkUc|iz5n^M}FQ6@-ja2~8}FwVzdm3R^`q5LeI3xOnLu6Oj+gM*PK z%M(l%gmT$Wlxz4-w9ZWo5p#;{kjnDX3HQJ;bY9RinZL3! 
z*5LN0Ju0DT_2|3kmM4Fn<^BV0bq0pH0UMFzak%7nXvr{KK<%8|rzmfU@`c@N)7Qs# zpnf*%ZFWMOVhgT0NOJXN|5a(XOxmoinAX{Sr8Yv~w(B-9<%s`{D4=Q0D2xB$&+p>8 z=hP96G`$s`s(*Kor9XA~1|2u1Koyf?5ir5=If1m|Ra0)}<|J zX$APli;qj(_-jE%gAy;a4s*-j?q%iz>@JY<73hAY zGOM#F7hfAvUN0&>aEL;HYFKi^9#qFlVzu4P&u{9#ntO(>G9Lg};CpCG%>cfYXZfD?SZlt+A^iXHN)dvK}auFB96&eU>8e?sgnR263 zN#E1rn*0`SoLpyX(>^OPulOX@xfaxeXh#j>nNp}`KBnjZSjk_vETxoz80@U_oNqNL zZCNZS_r4rCO@pnTt9~%TXQ(3}h2ql&(R7#SP8s>o^}{o+g!to` z^a?_G8fn`N{zPg?WSWO3bCcP+g`Qf7PY-p<`i{P_1l;8JAIkmcN)zSaB=Tzu3N?l> zeZ6>b!Q;4=OdN|Oi0v;O&9!^P9l#k&FM${5O9)jJNljlKpTO zDpErk>3;bB&e$1bRq5?xTcNbpKNjbeGs~;IzLi6XfklRyegCss$eqfFBl%&LOqZnm zQh3%>^@?V7T%2B5{Lz1W%J&+aQDC;`__++1&othS4hY%_-yU^mP>x-2?v5@aC#_Oo z=)G~b7>kX|*o5{^l3OO4YRZL#E*s+(*8_*#`~mHbCGX_Q0J8eMFF)Rx4a;C@nf{t> z3|Eid*V~>QTy<|Y59|K*um}s1r}pF>KVPZ-)BCBletd=eMNf2HZPVmx zz3VGI_JOa|SZ}}Grt_EJSbMW+*_W<)XzY25&AQuAm1WFc{q2e-!o~)s)pUS+df0glv0C-)dJ%`wTROHd0IM1Y;<&t+c4VzTIHQpy;q^xAmPWAJWE_ zj8Q6wXeB0pbyg$U_US}ZRU)NPi#WkT##6R|=9ik?L6`e$A6PAjOVO_tsN$RJJOkza zYdr>F^sodqI{|-q#$r1Z;_!3H^xo4IY8u#9y{B}Cx6aO}kFvCm10BQK$&E<{^dJgf~c}4P+AM)(J2d zv!AXJm#v_=B=pD!Dk7Y2OB!ymrFLT%%?}Pd)g9|yT=CYgU!Ztj+!#29x>(}6&DK@& zsPLQZG1Rx6_>PPQ@Jmi(G$r*XZS`B8ZMVn+(!%uxdWvkb7qo;ukme-?gRR7{c$J5N zO9q!$LElMSy(~^6DpZ3N__jQ(%&udl>>{YU2?3ra`TR16p!$f+36B+N0FC`N++JdD z;s545_7K{u9d1V_I^kQb1=LEi$7kiWPKo>H=Q;2| z#TNFr+c1XS5V(Z6&|D>_xPDr=h-+B6S~qs0c!st;uxw^`#vD1;8c?A|68@ zEA-N`Hq$o^ym+zTK4CZ%!Q7kV>~?Jl9NwpMP^552ms|cfYxCu|?il}<1FAUX+g7UN zauva`g!bP|o=&gq!2Y-|G00D5bBZ1yhh}cWkbE1+(0kW&KhD=R>K`#HOn7?G&AaZW zioPA`S0+s}LzZTOsK0~bjE6@$({tw=SN41g->>Ll(Q;;ejC1<=Q`U+rb1hy`ft2Dmhi zZfA>oWgp`wAvEc^K13pG3;uI)zWLvH2BgRkKoN7i-==z_;0p$?{SSt5l0-~zvLBcH zz@z_ZVqCiz65#hieLpVPyOe;u zhzgn_TI4_ZtF1g4px=zP1GXijrKQCclzIFIxijM$pOauap{7%i1#f#$zgbm>W~lFE zDRmo$-oZK*c+j2cwx*AZv+jiF#53=nANXl0JTrXY=m4?{e^q`=LM@7S_+sAy(D<`j z>AZHf^VbUN-TW}*?HELEB=0@El=NfGdHA$N;$bskHvM9wGi!YO3hs}>a`Hyd9H1>K; z)kst#;B!!9rRP9?y)=G81b<90L*(1sk%VDjysA$M>Sg9>}>;30X 
z*9Id+%7$r7oS*HO(NMiG*ajX{TZpNP2_Y99SD;c^x^fgf0<)W|+$PXxk8^cxo8R0x zT;j~wtUY8C_02k9mAptUV*E6xx0bMmxV}qKGNb38(Cr_xeNb}C zRGi)6jfxDckVDl0*5w)PlB^D`s()450GsQt`4_7rn9VJwS_$J#FB4b0 zcV85j+v{;2G7@bOHjvc1j) z@Vd&}WMr9}yq!xPrY!E7#|L&BCCl4>>e*aOF_MQ12eAG4Ba&ZMsE}Gqqc((=6c`W5rjG#|ONA4$=m9uyjCm82q*hVBJ#TTg&u(-1{ zzjxn1W~_I1$okRZ2TQGotOLkizx?zo(C{~Zk~@ClHzsaA0!W#+=)3N9`5X;dloTgQ zoBwOR%RSG@hq2e|8R6q|y~m~6Wj`P;bzY$Q9Ci0{8^SoEfon=+o^m;Zvs(M|thnF6 z)J!R5ouHyPlzJqjF^eD-p)M-5hU5R>e{K6wLJ=6ZTU>bVS1;H*nIF^x%BqI_mG|1b zgoCP|*;;))y9`+wIO%sNT0DTX&lsf?re*BbPl_LL#P3#nG3^nHIB{G9B=Q6*#H}G; z;O|AZ&5w;c8E^7DH=^fitOmSM8XbYf!qk%r#L96BhFVO4iuG^iQo1D2xzU6FQ_Z+( z{6;;gpU2~F;9|twtMsxbR}4ty2?r6A;GQRoW^cG-mFHsIjA%(j;`o2Ne$V5TLx*N- z>T{MzH|pU2L{^59ny<6;6GHtq_C8=zQaG}qDiGK9;^kG^sHIdy(?&v4QQj>x(CXKG zQS^HpcKD>=)k-~vqp;WGM)5%p#iYySGAU}gcoNP(k9uf#QnJ1)5ph0TjElNBDAL$s zOAiIh*}rnxst?lmvD-!O6|EA&v?(n5({lG&a!9%PZd2in<#NyOBX&F4ToZBANX4t1 zpc!wwX)*z!BM9l}<)EYOF430*>fx*F0mN1-_OD^cIA2N;>bq#_8A&>LFA1Y#@P#l$ zYk_^Kns(v#agt9B@h*GwV}^vfaJ-{oyg9`(s?+OToe>Nu365I1?)9Netd7&SL>)38 z3_9ehsU>PTp?j|1M{vNq+f{jwkFML~XVV-I8hgBWU9HmUhIPAp6B>G{{c9^%S*;5hCtqHjqGEfIeJo`Ku{|!sm2k9ShoN zy;s>Xq7C>)Ln4Hqw*GhbX#Z3F!wCv7$7xS|G}4T{7AJ)s4IqvAo0UHwj?-sR6{p)T z%KG8$ZQHBm(7S=3kkWA8A~x$AbALeF{=Y&ciCni&s|dQLe*}H5W72^_>UPvfvS#_^OB}1!|~NKK*nmT0lvN7%A$MCMDO%Ze>98`;k(- zzB4+J^19sqeK;l%9a&Krr_R_(5%BIQNJ1kr1!?S<2~_t%h7L`{V+*k+^sh5Ec7hW<(W6{?*IYknT?y05bIe$vVJ3 z2;I$C(;XdROCPW{92;h=E!&8dE>pH}ep~h47M6da&OCj&RxPAjQL5ahngNQ%r)D)& zo%`9@Mbx)7Plfe={)6$GhAB_NgW={ejKl>j zChRM3C=a);Axf?*XbL3c7#I0|5n1{7TN@i4CkN-Ty$QcN6aJTDuxeduvmikYb@9Zer2HPzwNA}h>d`V zN4uh)Wh%1B#lcY4hV9eU7-}`4F-Zv~Kn(X|ho;Ua!XAUx6To}-&kU8mYONJga8j?NJ~4>57*fw9^a}LA>pSFuLw`7n`i06`ruGE)ojjg^-oZEl zI_ix)m&DST`BSnlRGBxnh92)6hqd~rXP@sPo>CmoCR51IM$h;`mbnohRS6+;D@AIDB>a=p&&>?i4*wLG(xa z=_GYsAwOvMDV-yGIgxmW#pj0QEPIZmemmdR$N5O9+o4Zr3~uV5|3lEBmsro zAQ>)~pJ$NK*&a$9F|n|QVta-OR^oB$Y-Fs`Ww}jN@N3QJwkJ_iKeFv4aRSR(nSkFVO}=pz0dDP!fxo 
zr2Ot<*fw&8d<)xmPvRlJd@*(5@q(0-EyXx9@*7jPZ2V8#%r{{DJhI}4WpTbp2%!+cQp1}~Um-s2Zl1NU+xh$qYadW@?1(Zr! zwZS2|zLxpQ^%*D&BUsns`BiBhyeREEifMcqZ&v7(hD-a1@3nX#W3ABz|M{AlhgrtI zktz*XoX7TK`D-YKL4zVx88d*gPGnyIHVWWo#{gA_Xuk2b{FIWmd`Tggn>jFJ9JfEt zbo{w>A>*lR+G1lt34N?Eb!Yew`93@AzZU$`(#ua_@;>aCDj5mP3A`x3V43z$;g0KU zCZ7g2uH`XN;-$L!zMb9Dn)`o>j5T)I;dLgK@d5LbzuhUm{^>eYc=`j5g-QFT(L<<&nV7eBr3 zUUu9;>+h=-#^zn@BMsmKdfg_2>~t?MV0=bsX1cx~F0j{haz8jS?zBrYxxGG0R|?Qe3{#Ja+kqzEZS-ZZU|fzw+yU&YP=FQcw0 zoK^X6LQH&W38*9qht&6ItU5#Mlh>nG@V}-FOC{NH*7&hVT!JqAQB@42Q5KV>h(h^` zg(6zWK!ZbRWAN*>*PzD1F}GxRm+!OIywA#570A}LCC~&qVP@p0!Z=H>9RTly3u*vg z_Z5#bM0v5Hj3GH3%lbb|y$MuPXY?&jYb#n&9Eu7esdYd_MFeCDNv&0?2q~g~fTVze zOft)u+*$>cDOD7dDIx+QLO{SUCNf4v2ofO7Aqfye7!n{MmvITfKsnUwRF+Px~)Q$`&y`fWFRWNHy zSV^-K%sqv1Nf%OY39w$$)9_Euia({UjJaES*4fdX^RJMhXkJ^;7b`KfS=jkbRHEt=b1C}oczBEo|LOQR~PwAF2o!gLiOgjF4EGNBM z_Nq)fmf3nHMmLP3ccA5C*4x&ZN8qTBP-+w8eWNPDI*{@Z^-Zu1q$Y%g4Q%ZEvcgPx z`8de5zxA|e!d)*&c@Ef&gLP+PDUSpuWa8K1U>Y3BHgFUT0qAEolS02b730|jk0SU$-=;>bn3xTv5 zw{+uNIHm)C`Uy@$sBPTAF}FeE2g%A-pCOxjQ3Jz1G1rR%u3TknvFse?9XRnQ&wGDK zTwH&);!w&GX0wzXUd_<3Q_^a8Ff<*(19+5}slNB$pL&SHRGc8zcsydj*Ew{>Ck zcPW$Ih1kM>N%et;Ki{tGS6eu@W+%5!FPBFgHM~nDJI^hOlG^^8ozNByW(CKEs*kw+ z-d{nj7H=?4A6fz4>q2OAlOAVbciTXlNOO~y(N6uhC(N&>0!)PMwCytgV2ey|!Xd4N z6N;IQ#8ESMXuU!u4dZ{@*>t8-w$b^wDC;Dru|Y}MTZ7?L*S;z z<*Cq(FC+J+HsemMAu~;vqdGN{slunxci?>^StBNK_@nr)NY=`3bcw4NbK4htB8CZ0 zX2G{qGrk-~m&Z_c1H;bSq9G0LcW@gK+@Fzhl2VT+80Kj#P1jT#UQb|_DA&k-QauM( zEkO_8jY9V~ZQ))$$yrNoL=TT-qI<<|YT^Gy)7Geyeo;85Tp3?O)~Ax!kj-gm0_pc~ z6$u^y?>hM(ph{#Vt>$Yny%K5dnG&kMka{2W5nXcx#M4?=4ax7MCsNa?>Cg}HK?+p7 z%^)5T&9bk5(_a&p``f6!Vc>~B`&YPhci;RMw>kq<29ujCTSdhlm329P(JNi9i<^$l zy#UHbzqZ*7yRuQc;D87?uxDxH>5swlN7NUWMii(zXObA=%fz1$`gz+y8k%U4Depf? 
zc62?bA%ZH%+Q11ZZuuW;myQ@iDI%Jt=5p^uAb@lB`ZC-Aq1mrmb0*kxclpInlGB+9DmlHb3 zKi2H#AZR>ym0v0zvueL2(kk_=5<@sF>_xaEv^nKn&W3h+0%`(Z}rM(v5-tAsDhG7mXE%Fa8CSgiuNtuzt@wTa6@ISvFmi7kbJ z`2Oy+y$2vu%Ms!cTJ1&R;30*g`hH!CG8~37aiJqkg|zG=BY926BVZR}O!pepH4(GY zBL9w`Z!jv}g*^x{w#`ew6{=XS*w%3;*pDnsjSD3JMiP52!{`FWX4eM%dhsmBnpzZ>m4@8q8~*c1W|rbV=c2y0ocEh-vU}sbz1{Wkn6CX(JgED8d z?Tv@Hhv6BjqQU_7gkS^QPo}zgR0Q7*eLblkPvoB4|M~~Yrx8aRPG;& zKWtV4Dw?*WM0YmqvfJR;7&tOwXVwc7Psx_YbiM>3)kzaj8Kc6w4m7w~rpBL+N0^o+ zw>glP&tXMT%ARD4P1jQ&AM~u;NmA}dPW{m+JT<22-M>8lkFi{oadF>YaycHs>*bc5 zm+W=~8k&3M5l?Gl3Vz)M$z%7)c@=od<3{0PtH*ZHuQARdJ5*6HDB@r{4XzkJfm1)- zR~Ha^;0i|FdDmGo)9otCv}235*2A;xXoPj?lZ#zo%^3u0-)mI`Y!H*h z*OOP25y&4ocO!nmy91(MF!)@tZ{iL(5kJY7|B_-0Yz_p~tSrR{=;o5$(F;b4-8Zhp zBZSjEPmlEZF*jpCgRq-GW(WXzhO+v=wz~w5c(q^9_5oJF*y=PJyZO93ajp^S2ZHAl za)kCnZwr0>WspY5@5|U1VNQB3CMG!Ds=#-}7qqKeVk=Gmz*{hu`%-*LC&qCCDOC&EY`&lH|@T)b-i^s>x`}N&}g_w+n9jRQX0F z*|BWAtyB&@RUYoG@3wh`|#3629up zi5CEJg8=0AVCdvI3{R?u&SJx+8euXlW(l1ICak_-gJH-JuGMpX17lVGYbF%Cz(g=u zu$w945d_L4*ZSR`x1Kh+I1>Q1je_j?!QkO$DqSVEowif4WkmRJx@(x!OuV#MYV`Pq z5-JchsCQ!S=kLn-)ThLj@3e7?%PDTGr59`NuA9`u*NUD0Mc)kG;uoxeIx0*|D;v?2 zO6wREYl%_J^zW?0AEJL!N-t>qb858(L0>8TA;Y&`q7FJstYMze?5EJ551%&qa6CG3 zAalnj8opn3SUY+mMu;;tb9L6*Ionv{#(A3Mp|060_oo{{m#X{3N|H?ORRAn$B{4} zR=7*Np}%LSM5-OMRwM(B^gOwzz;-anA?#pxBd!tVH?l$T<4Ie$U z6DK}L@G}JZq&MY8STV5XPw6lbQpH;0Mq=~!)vs#7+z0Zgcb)h=u*7}yVGkUzHmO@z zH^LfCVko&wj-VkR>l5P5kmymAo{)O)Z~FN4{_NNhUwzh8BAody-+u{#bKjwI+guaQQXoYby zT>Zp;M_IP$(G!N+@uodRb0K36iSL}6DP`HOJuXA?1i%`pE?1PQ{OR?Q$eHG8JKi6x z8+uKuI2g^#rZqL>xGr5_!4+%K^zx9AV~x+ElSdUcZ6lpe%SU)Kb$!S1gE&jQ+(~a# z{y_iGRj)s9XU+b(V7W}d;xXryoC?Py4US2bk2O5Bmul~)Rt`OB4$Rkcus?ds-;gqh zc5WMN<1}aO#|EFJs=fy#jwd860}pWty%8LF^yaZ>BG)YaW-8#FgAmA{-#2YCu zpkqBd16pzTT)bEF22THfWAPYi!v324Yi4@Jq8-CGH=T&`KL$b1PX#CqD? 
zExP^w&5`{-d3;Pq7^TbbIRjLtE-n zVYqFQDuT)zrSfP%5PW?Iq7{BsYhv|PTx<^jat*_}HzjkP;=UP+u+I;)N zC9JC&rubg+{`eZVUuew60ZS5_n5a@*ejkXRee(|Dct&fYhIC^=587Pa32&;A;$i}R z#^3Tq_$khGqMq#J>ap&1$6seJ?ugF%ts$YN*-O$zT{uQX5tFp#&V9^Zm)p~PUyfs} z+nu+A2erI#%%TOfGJDZI)Z?7gwc!|KNm}PjUGQA8)Xx(xOKKZdyb=X9>hH&Gjk-BY zTd~3K#B1iD$HZsr=<0g{=b=K1}4%>nt98C$y7gCy&C9Eu9R8hV>? z;Cc&!NrK?o_%xTp4rQ1^uur7f80k~qyLymo{lX8N^Q<;8tLn@?hTpRG>jgBA3$|s( z?y!naDRBmgjlkNEw@DSQLaVUY^L#^eoGuOY8)M=rIG4HM^|;J$ic5xs^6=SMW&ow7(>rtLI88t+GQ1B^TAW^oY7|g6Ha2{KJREGM#0)Ja8 zjSFn|B(AohHzPg$A|B*kN7tH_q8&~y7=zq|uWD%+>B@oT2wArtJ9DtrC#O z5pI^yvG48Z&%}q#6?1s>p^N1?m*CzqKti)CYMP4@b`dTM3td^UZrKYSPq-6>d#NJO+Vug z_{q!DC|_!WzGvQR;956q`v>~f!c;aMWw?ke%s6Dhd(iR={470JLq0=IiAQ!`q)Tm+ zj1R&x=0{-~6?t;EXXtwIc_2@VJ9-N}L|R*Ae)HGKO0Wvj2?6P#|=kpF4exNSA8-AE08^H9!gqs%6vj&59z_YI>${Hu;U)1*TdZYh&+4r&Z+M^K4 zun$?z0@Q(V8yU3gM^}gSSVdB@W6e3c^7r?BK*{ud81$Rq&fM8eHu|FXWu?*S9R4TL z_I2G9%jvh=)N2t+`ILxo=O zdmUb(z>}w~g*;Gy#;WmP$$|M&slD0$mgxr_KBlFfFC7XJqhEU%{7iYxO>8sZcJR3EVSr8MlPH|@9dB@XAb01{*3Rg=3iv17yQm?nM z&h1cYc3N?84$|-tY*)p} z73mOh+M}ASa#nbHHl2f*+;q(Xbm34uMg$B+$;*R2-RSpZ%hx;Lp=s&9vgjj)~;6$+bqiQ`*7$}ASx$-eqWe!pTdlG zGCaj3dBWOaQVMiKbIYvj3!>;i%IZlOx{iK9G6`53K5I}YxD#qbFO9n{j>KYLlW&wa zP|8QoG}GzfbNNL`!?y{BEZwC0pH7T^if5J1ez&4m_${O@E&dG0?v`ZnQbW=c&qM*C zjaB-N5C6H6vUqPqd{@uD)Yl>j#;c= z!+`;~>z;zMAJ`%z|MP;VVZRB9@3cjNe?B8aF2kSN&jk5G5DI!J?%2`=Bk{TfPzDC8 z_a)w_%K#%)K5>d6ShP;H(hIVDNNADa6Q{zSHZ20wv>f~{W3cXuj`kYMENEE142L_X zv;>x9$$-qpG4!d{=(c6k8Rn`-&?m;4bqX7B(5D~HjuwPddMl$3Eo zPH~=trX^PLg)<6b6|ZCjl?!0|RGeL`eru50?~#c6+dS6Ur$j;}ez76E`KYPyV?$z> zzeIDI2!B25@V!086?2#ODpv(~g!H3iVaw#ba<9ZTjAddWZv(UV8hM&5@joB zKcA4K`Y$0`e?HhFM8Kp#yp~y(nVcwCBrJCtjiiKl5lGJfYN6C5)dpVacnT1yKg^kS7y1Zjrc(eAQ)kNDxm5jGoiOq-XzF)F4KXjv=-s(@E_R_n&WTZ!$f zn&&QKzPY31Q`3T7VvNpB+X8JNs@V#VvT|k5i4tpgmCsy2Jpxr(-96S+zgk|#{R6uK6ZMgpi;&R0_>zEZ~)dcl0H$LYmJ#3`LXh<{I^<+Cc$CaI(5gSmP z?aDVTMCBxk#Fh042hSFLk;JM5^4Yhn*}T)7lcZyX$lmVI_aM!fyK_Z}vtriA15r=> 
zWnmddJpt(#RC?in5H6LJ<;4GEXtHF{N1s-faRt`D(^XMg5j4Q0%HfGWyk3NBF15LY9Znotf>DqwwJ{ z;8RrbW;qT`_g9)plQ>C6PGhY74R}`~+^2oBVj0aquF%;ab%KvQ#YLwou{4vgrpEgsm&kuV5(k|S8+O10 zDb;PbfNkSTEL$;!#zAdR=}6UEkX@)A^YfX0;!n$}EP=~w=Rr!@V^x3vdl_KmAknfz zuTWUl(IC0wd*t`o;5R)XZ@~|lH+u@zOk2apz94P{#>CvCYe#HPYnVA0%5neP{H@c9 zTe7p~yZKx%>am)qsP52>kFc}lAH)lBh~BtmkCf-qWoKc~2$rAa_=hm{zxBGf5TjAO z7vYP_b2N9uW4~U`{|*QE)BX3`&KaCp?22>aoL^;KjdVu;$@Tal>*v2^OCzkmda#O% z?Qi=!PQL9JrP?&amcPbl_@g$FBQv*rw=~KkxK9dCfY(Zo7TR>oss1VFY27`yIk{!lg*Lm+x&(XQlT-Ac))Yng-dwFdR~GIT<-K%sO>1Z| zrhUlOT<(&)wIEquVIVka_}ai#(imh76veZUAzJ7_NcyTKOjL#o$!S4ePyAryXJpFL zM_|F~^ttb%SM;aj?q?{s)_f$W|}i!OgiGvt06tbS9Nk8=jXhGnhfHsk>V zz959q4}}-Gunlky%F$fK0M~8jWuL6$A{K%{(kmoD<@l@}hs+G-RoE5s zBtB}P6V-X$0{H|~R7U{~S{`KxCWv`=m%PE~K_IbRoe?1Wg#zv+G<|u#+h=S<<{)bg z27`x8Td4|olUukSlh+&@Knj+wH7&Y$Viipi$bD^#uJRvT!4%Ji`^2TX-9wiIhTtp~ z%)4>=3T>F7cU+1s$Hs5Zx|C+J`?RAOYL^0UXri!{?2S@KnuUDl@!M$4Pcp0XHPwyH z`9Z&959r0lE}`kDyz~HLkC-XN$?}5Jrj$nZYxkB&H8g)G`mgX7Zoa`CQ}!6~euy@4 z6VDu5F}Zv>u^s)o8y{_xbe95i8P^OhY4t~X{1Q$UU>D?scBQ40` zmbBj3pR$<__NL|yXe70`C3|4aT@-ZsQlg>ND4EP$1`}fiH3vIf(GO%UEpL9bnng;yL!AP4MT1 z7nL!3IH2)ezF-JHf3#@o5_By)xosraR*eQao{%~-&W=0jmM+6;g%mTqPdGhV%Q*zD1#yD69p$CZ6*bz%7DSGqfL_5sqfbw z4-3!D4nQt6c%p~PO7qSXzQ>O+#FX7 z<84aLah}#rIBz?-lH!rHj7`?osC*37yyVXhjMwlhM&bv=)?TAs zv;{d&4qpKtJ;@&xm-Ikk(*raEIp`o>{h11l5pf0Bhl7&Gd!543>Mx5IE%*X5E3}*! 
z47Qb5=2uka9UcnoqufZ{C@u&U;xZ}@KheA1qmIy*$s<=gf{JpZ37-U=6U6lOaLHhG zxvNp3EfM}V-Tln@kTg^T#jSZU+_4SZlDMHPYa_dM$O0POD81|4j3J=~ zZ!}w$Y5P0ox3%iwOzV*2Xu5voAY%dFwbSVRo>jfENtMMspH@=!SY-01Zpi^&FKRmT z-(+}lyhJ4#S6Sh0CSuX|_cTm)gSUg1|5YvBD%Sa;T-9`IS=&d?fiIwerW#C5cTENL!Fth1eK4H1aU!lva1#^d__meX9%C%Jiqlk2FlL5(1 zYvJ;x8!8rKxmUjHlJw5T=GDi#h57g-F!-X++Y)tTnZwIe&1oe8gE=oVxHwScXS2Be z16QwZks*g1eMYY~o`kVE?_s@A7Kx08U&#!`0`0>Cz5smN5eH>`!Raec$7_U0ue3!x z9(AG;#m|?ly)TP?i9rQM0Dp{~?`YQ6_5m^#ko%AU9Oq#2RqY?j?g5mVa))ReBCVm8 zl*NIz_>tGZM<-G~=+|4acdlXRTkP`&|N7rTsO5qQVQ7@SLfI@lP*OZgL@37nQ>vc$?bu(u;%Gu9g zd;De2`gq$4e@k&cdE2aId2fBccvliSxRLq+J>p$3mGPc-F)q17CslZ*%TJ@uc=J2C z?!TINkhNX<8vB1Xq5tw}&%;^8rmAA``C^!I#AL7ikC zGD&vXD&^&`gp9>S7*n*lQ2&@q**HH~07>_8j4ttCRp$llIA+MMC77#LSMSj=CH52W zMDBRU*y_a?h9+J`-82SNm4|b)JR+Xt$W@wRE`$GUoTRm`e$ldnlfwXWVxDf@fH?B1#f$Gt#v>B-|amyVa(OuOe z5=(BP3qQm=_#ELq`Lu3(yC#IUj{O$8wuDVX?alMy1X!)7&^58a(<^i6JjhLYl(Two z=!s*C4-mZ1yPDr(xHOrHu$S$-aY5+v2-Y?+=|FtG@!h2{C(YF%tS5kYs8U;4AVYl8 zt7jV*#`K2J3N+5YaYM3vvB5W^|25(iOL~kxk z{Ms8BTwI41i6xGzXJ~o_{;^0u4ejmLc655NZI7z5tvKc78G%wMQ4KPPzb!V$R@C*({;U{7&m@r(aD2u3xI8Iu;}Vaf=-6vkc?Qn6uP* z7gEDGy^CnJlmCE@#Ryx(71^5GXXO#SU7k@#b9>WKJ!dM=TUdlsi}|%wE;R{dD7;x< znsskw!~AxkJ@mO^UIfQj14cBadAw-gMIk-$hK*wj{bCrN!eV1f9W&L93hkp>nMIea zu5{Yc8%E#6^7&G|!!fIy2Fq+zq&4t7Jm8x!JFki%o{ct^WJ`0{^?Te9I{sB>ZsKj@ za;hUe^V@dr)qA2gm8{a0f-dsMP-%$}-eqmvK7vZ&bf08BCo64x(Eou+?6oyoGh~l- z40k7DDylrruUhXnWQ6*Ip74zRsW{EH^7urE9rc6nL&=abMScFQ%j=Wco<_FaVEbz0 zeYb_tWlE-8gV@n?By)$t43R7s?*^j)b%ZSLhimXVz$Nbu`HuT=K1sXUTv41Fco|$rqq5M9fy*Z-|<%HQm6kZbXfW#{siJRcbnlUd>yQJ@D zLpSQlgQZVTfxf05gt*GV4KAi>%^7{dbkRJxRC7Ayk<{x&(W<62F&6eT)#kjz$HRN;cB@_kpuV(!W>*-Oo*FC4HaXh=dV&4-~%fbAmK+4$maMeN#OdATqu z$Or(!pU%gAp&pIMT(zgoC>`2DX%1!J^&G1>D04nTVFU*Mn&MoWy?CJx%|`)-8bj8- z({ucIEbopOOzor5Hm!>#BOWeiw5Ia_6cSWnCF)rx7L1NiVnoh~`#+OX^d+SuBiMsgV}lHm3#(ng!JRGTvmije}P2cl?n zj*e<2ys>*x>ym5cRQ@T$se#Ec-==M2K&MtlCv8ZOm=h%K#|*UYf~%^P(+Kq%0)@qj z7HJ@EXyxEafPq8K4Nr0sstF8}$|_cVb8*&Gmx6cz(a97@HH+eWl7z){7jjM)SomNH 
z!QO^B__@yvpterj7=&-BXXG1;`AnHJTbh2n%iwafZ%P)^P?AHTkoiEy4xMIn-C%R) zU$R3eG`J;%s{9zUsqi`B7~;TA9Qrd21vEwlOroQ!4mwH0vBRVpWCAlq6%&lJNiksf ztoVnB!Itcm<1^t8)8`(D1q^`PGr_1)nD{pGVrax==juj(=Ec#9?npuSn>5QvYw1PF z5Ap)W-<2nS?b#+tH(#VL@ujeqkdPeFjA275*P0pMB_E=XM07p09MX#s^w|HoD&fsH zU;H!vef%>EE+c+3aco9bbvUiSkpQo3nQs8hs8r_Gc%}L&=&4G=k zkkhBa`-4XthigV%gI*0DWtA)QPkw;jo{&4f!*fr3%o=>t{)+Dc%bA zn8c;;Qu56B%-I-TviOPE*1U#VCBsrsO(JH=vOMBlH$I*(&o5$kdtI5oUY9t-%C|R` zts=vFP6Pnje1r6j;m>D(aEvkg52E$t33|Yg9=W8O%i?@_q!GD62l4t>;r`5Gx@6H@fO0xc5?nN z|F~QM$mj3J@Qg?a1TI0CE_fjIMv3|G&VNR*Z5k7Q^sdTN5m<}^t=?3=Mo45Q=0G&&@N^sOH*}8g{&V$g9VSc*M}2dSoE%q1rYIKqq|-CYN5~^_E?$0dnm6$W;=_b7 zI#cKLazcv)n*nW5|j9CZ2RJhxtF6bR2bOa zxmdebC$ZAP5{f8Tpm4e&G=fFCOMct$`WA7!U|s`bB$$sjzht*OB3V!<_AVr zaau}1>SzDWy`65zMP{*(D+G@^)Gi( zr{PlvJF$8lu`gBXOu0^z%7!7&F8Uv)gR>>fX?Z^W@C)x_=fG^mT)FfAZ4EfYw2dyA zJoX@`ULwVPi7`EFy9iUDtOuk*X_iOtvG{R>OaID4lwjzpDg*ss?=Lv$xWOpWP+_n1 z6E-bk^9#N!$lOagbm;`Bgbzw6#x)T zP@Xf%I9q)9ExCRrlyFoB_445JWE-36QlLY0@L|EIa+ughsT4+yMq)K?Fl^!Ci6OV| z8rP&#>UJBktQF252HRA7QBvh9DCRgrVa3d@@=XgpwpH-$NMbeagmov5YAHh*73pUk z?!~@qKSK@}IWdFjax24@6KcRG*kU*z>}^SfW_LccaoP9Y{u z&)_?5r|ICCKui^W_6!gE4l;7zUe>brGqtl>-2n*OAQoK98%bm4z%M0}ao%?b%hD<5 zqD+hP{-IYR`BI`;`<7I1ClJE@-u(|eAL*MLO1D=@xnx~2X_zGaB`XDar5OJFRc!}e zPl|P_W&|TKlaTb!Lc6m=9CeoOUesNNdzS7SFi7m&^pbpa>#u6( z<<$V|5JAq;T?)@==XT3KT6=&o;(jpo;Xjk%f8`D|514rS`?@B#_f@)=Cl`}-%@W!? 
zMMF)@JE0`?#0~@Y{^S?$Cwpe%YL5db^F#keT zL3%4!Ys~-p?xbu!brYJFyYwgDBkg6t>V$rFkfS3t=VE;x#H6D_;&T5D{by~oJREBt z&1jFM5XzMqU0bh-nDciins3#VmYYE4z*n{JLnH=i?w}`LbZ)ICl zooE_F{3PJ$AvEI*^dqYqMpgd8#srbBQQH#_og%jlXDR9$aP5)9?d;>EZTEvXPCGW- zdo;1#EY`!0<#v{b;Cf0ny8Ax48G_dRW=Q`#>S=|Sb?i00$k)v6%i%J;c9_b}CUMZ4 zyDAPo=-jUj1M_r-P=dUh<>AZ#N5P)TBrnMDSm#-D|LAOp^1L4F8=P^KA|RG{89gYu z95&F(^z*c2xkbIDhVn-c>Ehk%{I|N=K3yz_KUF^gJ!- z48}M}#})C)%Sjyd&Xi{T3k})aQ^G&ioMHSl4cF* z0Y+5@$pP2+Rc(FETVBgTr`w&HrLG!9?sDpiWX>{%a4UK=Oa*KT^l59IOp_c1!KMe=OiIv=2IQN$Dyb7b;p{N89UBB$tWV7|<`> z*L!eq&@_|@<65{w+z%67>?~WPV@OwX5s|7v>o|Xp6^_=BJXOSNndmpM+Nw^L`l6ViLacx>$(U=CQ$h44GHg0kV^)t zu&y0P7$%_iI!+yq!NOVrkpVsvSCtB{94er=)pNZm7*nS7G#jqsu2)JqIUg27+{h~e z{tBuIx@!^xR2$Y-=RoU4o2C!vEKkP8MWQ`6;wegV>8Z6(pSwPJw)EVg6Sus`sN&{s z4N!g&tw{?R+xBnaS@hzQfd$rc48s|hfmuK|nG^1vP$p@LbFW_pqY*ItUs_G1`YKKr z+woc?NpZn>Z^G2kT!t*&CLw;S^xWSKS@-6&*!4~I47>LTpvod#@b~?*Gv>oy-IW7W zCqt7*pI*~5}F_9{mfi~Tuz5fpg>|O z#Xx#pfr*x{{??DQHX+A8sNEq)WvPHEwcdK2`Zc<1wVAZNzf3J?&3dd=H7U<%Rph*o zGVGF4jG5E5Nyy&^R4win?e5l3K(qgMxO>@;**Fi( zhQv?(MYr3$svg)MZ#$5A=))Q0$?g8gSLeDW@aY&1ZQbA}k4b(jj?c6?P!A+DXtvhSy~`0+$=N=+P3tgMzXd5?k5cSP9-cAcsp@NrS% zz1BvwDF{2XD0QW3qV`Ln9^pcjx+Ir2zr)p3^%?G^BbO}bWOD&_eY}e_+@zssrpR;4 zVlHr77Avi$#HMon#IeiS?A`7e_g-?2Yb^{^MMr5HTnQX6KkDkwAFi28=2q#-e5FWBk)-y52v&CDSsM|Hg{~N2+Sh5e9Ht38Nu;J3cGF}cVqZ@3}B z?cfj+(jBvCn&)f2szGkE)P)kTE8lk&+2&Jk>>f2wD`A50(C;xDy7mzp6N3fPbk^g* z52duoSKTjJ;hHB0pLts=BDq7864E6hNRB?>v8TE(0=`ZK)|5chNm6ow<&>PsWOj1G zauMY0H-$;S4Nu?gSb6LjQ+%mG^^&W_G4<5C9EF(xTMxd0R(1$=SxbcjmzM)3Gv2`y z>5TB)49mxIPtlckEn{Ki_h;fHIl>%gX@F^kPae8y2 zC5MtmkoXQF_3*pn9S4w(FRo13zS?*zc-`z@zHWrSK8sJQ@_?E3vnkCBykwMhuQ)Bv zt!eOB@PU5uZX4BsSJEe-by-R3ThKQDjNM!7a3S)#7@zZR2^w(1Vq`D*7FdH|{np^7&oGqsXuzp9Nx z!~Z)r#2*YRpDqpwpZs5csLTDccw)aZ_GNU~QY+F{lOzUY&qd72>(PzJ{BC+CIx2|p zTL>*w27F4lOgoPs(aOw{fvmd{vZK0dZoR$=H+2%U!V_A}2!OQA(y8NcZP@bJoVkgJ z5ZGPrt45@gR$fPQmKs!r=n@h@n$4HT{i7e#cMJ}~)C$_A(*8n6)(iLa*T>7$E79lu zf{-k$^3uf~W0g8EJ;e4H&#^QU+QaOM%Bf}Ua*Y!8i#0KmF_f&x()aOCij{E+o>IiD 
zy)`)4@c4z#4N&Qqr4GIU*NbdShE_pWB3sd_V)IL=86O^T^e8M3DpAV(4MLtzN|^vY>zZe42mVD?L{IKr{lI-S!vg-axJ^4}Dt~73HdBiZz{8lKjM5}8M?VCz0)sr2y)lNmXQ2q{%-$~^8>?(Qu$=;?K`QP zEOaAQOyxle^r*kQ^PkmQC;u9c35n6~UXiDYQBQWqXbF|!9!S&7zHYz1wz#OHq2^^B z248BIZ!#O+VEUDcb|8oU`Mt2|RASNHa*x`q!76BOka{d70LzSv`co;8P7B{K+F=cm zbAstOO8sj1AJo3W-Rl~E`CGgubt#sMy!I1}U9qwnKUz!Jgl$QoZuDrTW)4t{UEV&fpDRt3?&ih zDPF)flB-{S#ZJ6K_f_|A3KyfKEuFX|r@j60N0CFdic{d`C0r3g`H%`*9{Q#;{zFlt zAXND#nkVxnQSGxMaW${bZ;!Uv@AINaoHJ}yKJa+ubIDXV_xPK4osV@`b)Q~(QUCv) zioFd|Gtp1jyB^jx{nZj^ZFGY`VRh*?K^f<}W_}D0+{zZ`uk0A19P@Ynk63diwD`-8 zlT5ur#Sth;;Bn5hHg9Fy6&CT=BQw3}>jI}X$F%5;hcoL2g#Foz?Hfj|^H?`I++H#)$R6t(iekEy$4h)Gl=&Pc3EYxyg}li%8NWmIWE<7c?h(F!7q9b%X?HQjQpAW_F6@K4!9cer~sVSB4ZZlZ=WFEGI^(PRXzj-M-d&+HNic9jP z6D5g{Gn~RYy|T~q%?jNNt|C4w*SwR_zP~1YZMKySy{URK=R8XeB_H(T*k#5jz?M)gGdp8zD-qOkca_eGX^FcS3JYCD%qoe?G?~(vRrGht z`l7mVe>5&M|JxoVQ3a7M2Ng|Rb?;U@08p2qfYu!GQ0ft|j@`6mlLtd3(uh~(sfAgJ zLp`L$vb@|k?Qz2XMbyrC6!A=xW!}nYNqsG!As7`&coWT%8S|~0>^U_)5#uoibnO42 zZuKg!yxehe1}okeOBM%$C4<{)@m3v=sxNq035GgHZpj<4 zD;ha(m~p`nmKZe!KKY8)g9D|Tq6O(_dT>9Yhcuh* zv6783OOIL6lL>~RTC`@Oh$Bu9y8VwV9J2xjZL;o=>AJ(_9XHIn@eOEdumL`)EGb>9 zk0+rT){DiK`t=QsbxYpA{1;|Ew+eHJZa(vyp)fS~7RoFNGCSk$x8ute-NxKu;!l1f4kh!S>yICWxP#vQFb`J(c!%`pq!&XwJiTzIG0f}|lzB$(E=GukvKlntwUCmPo)9Z-)l z&P1l9QHb#CRb{}*Ly677CU3eGi*HmVx{)-Q?k0#>|Bx-JciQi5U>J0l(y$0S;V4}H z2f)3k(DGG1uOYtLFD!<6Rb>h1=RZ}ck^b)6UlU-6Iuwq+rHa$=-76GyL~o?!9i5)q z+QK^x0-*%`P`>q@)>Ws>x1!Tw_*@)>A`**m&Rf}joZ}`{22v&#X=sO@)Rmskr*HY7 z3XB}g9(y>p^PV8x2(}?{S5HLHah3`w7dp)G&az9!SRMnHs3y}|N{deqcvYS0 z=bf~dcoVD%NF}81s+B`UF#Q~@a6G=jxS__`(6Y8c#S)H0m;}|X-@b(EW|A!y=ktlG zK<*sOSRSQ7T2z``7sV8E{W82SlS%_ye zZSg#Sdzmw1k!KSE%Z5AQk#XuqIq^yKK{>Nj4H|7$&_gT_=k>ADXHIuGEUzG%cfib6a-6 zAC3!ah}KuJ7YiJ7FOZ#*(`tQr5`6r5@7&fs5h?Q9v-Iy0z&Ur+^5;C!NgMnueYg8L zk!~f4VUjKl_X%A$Rh)9MQ#OBUQ=)PX#*>RZ$f<>hGv{UJX=O5~+sbG)|D!!_H7!`ENTNqBh!(bj=m%$Y#Nf4N`l8j9xkpm(N!AorwI>i}jUyN`Vw0B9lqO}vrqt`|4A{g zi|e-@un9=lQ-s+_DgeAZ2v4Zm2m5p?`mGOQ9}mH-3GU-M+34KRv!At4 
zSF{|SP0T0PDE?*IuAMHuj_)HWBkz))m@l;=o;si4yG{=`TWeEE@DXhTmQl8aJ+rzE znV8qJkwIJbkvhOk^iIf^lIZf^hF0X$$iG`#zY7}^FZSYOgV^BnziJ(_``S38mXfv0 zRfqrA4^!Xs?6)NrZQZ{T_F&vqRL?hSYqi}OIQ9jI2f7S#iqH)}JUTJ}}U%vvQh>mpuj19!2Sc1w<# zuu(Kg8?)3_{N+*OgrU0Tnf(eepC*C^?T@Hzi%(Wlifx+k)I6i664yMzQGY+N%)ivG zr_(!(jxBy#Y@Qq}jEwjJ#pjmLkAiCahwL9}B2z0fY&~}FhM)VzEX`P*dw&{qDD9tT zCHNa9E8Eb=w;}atT!tHovOXVSWJ)L;)4H`LkS!9(6C=rap$LB$%Hn;be5mQA1d5xe zB@oBfxp~m%qT#MhE3DlrY&A z;njC@kfd24iS*756sPqyV?^&+YsPmytF6{$=2DCs`6cuXEJVT5SOZOQbBBteG4Ky6ffJ& z^>Q(aH!`^#7-~}7a_xUmWF$`E^<0*22C4@aRWX84iCR-!pQQdBwRtb;xL)@Pg=ZRSD@+B#3n;dkNAMR1PdIy%R;L1fyx zh&QMAoZz^6chx%svS2cktGkh)MLLF#tH7w{dte-I{W2_(%|q{qZKg>!RgVNX($2he z>7|bD%=T)5L>h(H;$>3P7f@5{$f;Fu9OyW|9G?4U-4l+Af)QL+aoCW1z8ZL(p_ypQihk zF@=AIIi+I|CJ%}Na!Xl~OGszS*PaB9rcR`)>GU|512k3;V{afc}Vq-!b&;9byW|jiEjQJSSOk%3Nsf8f%FvwbYEjO)@{$J`ddK9}vDVq3d|3sabbz-(Rgvu?ank2exwGzPY4uE8^s>|$ zDtE(XHXHiJS?vQ#-Py?6tlL?;v;MNNPhw4<#x@E+%W5cJKUg1{fRB9%s$nZlW9^r8 z+X^SjZrERHxkHL}M!&Fhj_WKPEV1`8xRdl|4f!g(Tbup7q_D~TU}keIIeQvV1V<3% zx55|{7Gd^L0tf1k^wgGbfuZaRI&66WGJSPQ!oOt_)Q7q5J5kUk@XN~TrB~7Qpjb;!w1hsLZXll zu;jBGIf*8c`UvzMR^Bi>z*L<}zB7yf-3O@hGo~sHUncbFQpzGNT=*Y-D6VV}`MmTE zvBSpjQ*YaqLRm1=f06c`;*MmH(ez$w@gglkAi>D3>&+vl>&BF>$v8qpCm!iCdvZxO zh9sf+NOEB0`U$5^SH$@@@dPqzj?20eycHv|39wxJZZnZFU3cznCQ0zQoC|d@&~z}= z_BTk%Mtx@rVH+~fMYPfs$>q!Nq$zpu)bEuKR0I8fxmOW3T5J$JnD3B$ilD>zsZVgU zGSRG9|ATQB=J^s!X3C|Zj;A!I47#6hlzP>(5DW^2RN^-$KYuY!12NlTYWXabk?KHH2f}1Ia;7H*PhG8orS9Ncs`=_8*c=;kuQXRl)aE$}sSw$+LM5M~T{?;_$xC z;}3g_Kio3uCsPai)=I_Z4@IRUexZz!(4dbq(?RT+by_zZ_H>JR5sj?eWZEdvw%OG1 zfVsGI!oZq!$E3yB6%3gu&sku`5yc^`k_NJ!&wX(9Ikgg~S4Cl3aY8a>F5BvR z)jml@ap4u(gtl&wzMft_{K4ybeH_iD%eoK}IRhtOcalJLrDmSC#q!Q;?9j0E&^5zv z;$)R|fDycY&yY2j&0NH43V*7Pw7+3D)k@N3f{4(aY=CUOP{QB6i5~M=+v)Cd850=c zA1ca>O2qxQ;VtGz1rMQW;hi|gGJa>!jCoIjf$Hu39y!_9)(H#U7_ssuJvs-kQk-7< zt;(fLJ*ljUf8#>9@Nxj(=rUD!eGMh=Kk1rGMk(-HH7B!A6rL)aH$IM7tfolzL2F;< zTq51u-`kup!fa^9w1cW-4tW0tqu)d2WBKxYaFQA+am7^%RP?mQumS?Pw`RR8LMCLC(U1uaEv>a&HVM8 
zD2dH?4w7Uo$1X{h?qjY{UwW(0CI+3<}jsV#LX@A z;}Qe)T~ULN`FaBdN# zf6p|ZQ|9Qs4_zbsh~NLB?(H3o#9?yq^B2)r8OBz}+B-#wD)}EvD@^J=3rArbgAD7R zb)z_sfU*<;XyYU7{mA>24Puu?johB1%3DYmzNP2~Rpi|aSBJYn`8NgD^P$>8R?cHE zH480>+Nb{D%%NRt<)SU^?n6s?Jic+$mExD4$L_qb8y=t}z;$(im}bq2fw!n|XghF5 z%kh5;44&3HkR}6n%(&o-Lnl;JXI!$=NZ^`!5CR6m-T#yfYJz)yGvf~NH}D1r@Ga~L zsQKWkFI$OH*>z^Lrmya#DgR=YA@*k4V$qPCKQ)fK_K(v@;B(!!DA6;{y_k)G2=T ze1xY}*6M}O?V{CTswDBLa-@Kx{mn9IXLsap#>4A`zR2JtqFF2c3%1)}D|T#p1-*a9 zMeQj?-+c5IVA|~79tOVqr(sGK1k_fov^~}vW}K>0d|{=GHT$?Ro?OqN8kAzp1!M8u zspV5DI81`v;sD=5uiN)ZGiQ0Qs8GYDL;iL!%S2XorM!P8Gm&+1$xnvZiM>F&E8shp zh6dFX??WuzK!dDU1C%7T)gM^IU*oV%qwp7B|G&nfzbm{sV=?J_ZhUv*A^E`(Pv#q* zkil)!%cSeakMmhwL4vTDP;IqF5x_WFnB zuo>=6RQZL_|27z3vZYBzAcLM2Xe<3iAWTJc2rNlWC3Z4&yrAG=m<-Adxi-g+u18KV z5Lzzpa=Lf^(D88AVRDT4daC=o|EpF&zv;M09V7{7L-J0H8{GMeOQW*phE zNTRRz(yj>Ly!@LTQbwRBZ+P8%-6=g~S~R-^&VJ|Ffx5YWzUM;*os#Xylb0-GBg0eY zY3YC|oav-zW|!X0y&F*z!CgH71)+M+j0Ye=8_2 z`jHO)Zcvxso+FwN!95y7Lg75}dM7iEPRF5N%OEltA;<5Z7^_qrdoHrbdNp4Hb}5NX zb(aM*>cjaRXGx@W4@iCs3_l*pkbCG8zT%@pMPDm7qDHPlsRo9*@ytqf+{jBBgUo|5Nf?*dqOYNAbSaxLoIF-p z;_CW>ba)}WSYS!~wMlv@3+@mrYGL+Vg@!i@mk4NRbqLzX+ljr#v1y{HUNYXBIVs(| zc+tXlz6SZY(*e8IY6oh>l7zr!8GKuX-dkg^JD`%e5US2;89I}D=RKcI*0OPXB&&5E z9DiG8%~EuM=~}U^UN2a7_pojbYE&HR%ZzNU*ylEH-1xZi;E1x`mJD!9g|-%<;oK9) zZ9;2KGoj>SEg5jR?1%wE1W1U`*RKXkzHs7y>TE<_(r~8Q7Kr zvoJr5N`(uS&}6OIueP8CO?u`wlN`?JCK0~7g^X!r%uR1TiCPQ@_=V=q?o@HG@{{#9 zdSM?>ja!?wZ0U%<+B}wD1kASGu(dp0Qfqt3z=4-zBSkh<*C~6ac*)Ms)1s4`?qK{> z!+&UXX*Zd940OYgTTaAyrkVeny}Q=W#=v;m_a)@yADimDuasu=uG|QCyOO76ENh72 zEwNSCbcoyIZa|@DWl_VPOtSkZLCpF`hFY%5el51R!gtCysZSRcvZ5r=zn_f*Uv{5XO@3cA5S4-vUZtjFeEp}oj3PS zbgSjRoIahZ84&nP((yIg8^p7J_*fZQ{-UzJk61@vj~Ddw(Ta4eb6urF{A`98dtf6cn3PDCu(^ z?*cMRQ+0ZPDV*&mYFJVCg0myI)Kx}F)N^+|=_$By0D_VyZAYzIoK)L}S4pp?{>qR!agA-F~(0oM!N2BYH6U)0ibT=3k8W z(^mt2v0Zx)ZTYLEsBp`dn(L+2HebHSe!PLZMAYS7V-~&m@C^!xM&Qo8`82j8sA_NS z1APS~eZoD8dxiOnTIf&PwdN% zZM?!(q^*PL;~&cd#jo4*S_^Fa{YCFje(>J0kw|jKiZrHa&t%>?kGmS)Bqp|SoDL}u 
zXb2l8gSyXL7A7BAqlU1rhlp$bZE8?75Dhv%6?Kc<9Cp4dtxgq&ERA~9l0Q;C44!4w zkEE5`_37r)mO(Gh<`XRHlN`EiMSwX1!_cV7Ot!IKC1R{poa_7Hr}^Er#HExWr9 zj>Gv({!y}Vr_#5dwdIU189<3AX_*Gv;#Kqj{t(WFdD3L_J($72SpS3M!X(M;g%BkG zyC!j-r(Lit-@gxsx!tl9EY!OAb;o&5qTaSla za`lBwUA$Yf?NABb_XCeq(EJGHZqY;>^xE^Ehqj+)3G`&Wd0HI164%qEoXEKz-zvFY zEV^$nZ!E95k68>Z440ctJzq6wLHsp#26!F|FC$urd;ND^v6A=K@u9byO1ZDwjAD9X zMt1o%2@SUM8no&WUaRFw9h-HZ+?(gOZ8l@{c_LGxhn}WUkFdiE4rdx!K;$t{`m#v> z$SLZ8)y!nj%g!@pl?#g}hLjJu1Tk=uObzYZJauX;qpC*ZFsP-i8&JCZ{f_Y$ zm%k4@&uz@UC`*jGt*n$sBGw44LA^`hE|U1VEqhOWqVBda|9!+9=w!7m=5%(Ma{j?; z(9gMN`tTL5@DC@yHJHv=TR_~YdJJO*Hv)g~ku}Dl{6m3O7jP|gXX#$pBK?CmsL35c zkW*0dfic^>f3Ig;k0QV-C14##;fbe1<|BRsY5aLM7e?Ysi!+(^=`G+Z~U5%0JE~vvjQg=*u@TWB{ zF9qEb5F<6jsx(WY-%ua7AEr8_jfV<(Bwv&oa558L^E}#1)VqA~SKT<%t(mVk()(VR z^BIcjn!pZ&OS12j8g|~Fl4-?OmN|n!NZXRGDqj0dw5zl*dLC~=;5#HoyUuBj zZhI0QNLb(FDk=B(X|{>rz#|GN4EJbo`^%AUs*PKSrjsXBS6wc9-v4+9W9H(=b8?|s zITgMmc6hFJW-7k=+E}Y;7yG55MtDJ({zSs+zco-BQa&6-p2Fo!(w^ux21Qd*)?wL3 zx|RS2w~8b|mejQS@h=9T8o+P@C??T(L@2k29#tB9#E)v)M~xRK0Ke88UL^kA0N1px z^pN5*m$F*&6MH-81TMR?E#k0DO`h{~fI_A@L#x5ITm1}-8Y@`6uB5z<4djpT3+ zvHHMDZXow0tS&e=t!3!H5t$uau^FmYsWt_YiFzIrcXnBNwbsb@%6XGU&Wz0)F2OzC zs?hpZ`Z$P7J9Dt%Kc+u z(Vz=e7u{8se<41}yZHbf)>$77#$+kW%{XKP!OD-ijE^ap+$~RrzDgYY&6UZz#Jb?* zXfxG0KGtDwuGfp`)jmZ4XqUsoNI6n_QrWmW*WbJS1bCx4Q7YwR^~YjVFH){w0;> zP$+AkKmE}x&oo{U5zk?0k6P)?q2hFvK5cSs44 z!0fFna=kyI3U3vOqDI64#*G6lUiy2M+31|}yW?7;uc^l0D9#8gzl@H)wcuy9*nk$t z7V@a(qob({vVskt_D8C$&ReZd(+<%BmNwbuwvPus+u{Ag^H;j{zYXAIcpLgN{4=F* zifDu`W$&n{DG`4B$kzR7a%ww=e2&MUtggvcgHpI%;zWa^6hOr)Fd z<^DVs*%OATMd>Ev&&YHE_mM^}4xt*9b*l4Fi{<6gS9xk%&gly6-Q4%9v)6(;b_-vf zy<4;CKqwK|AockfT3>0sAsuCEFDwb=xO%WRtZB&?(CK2D$}D>1L#(}tONDtJ zQ<&&SCgZvMY?Sv?0#XJJDyre#r!e{%cO6D>T%5-2tESdz4YNWznlD%S?f=NRp6Pc+ zM-s6M>ktun@_U!*(6Lc#-bxwWA=f-KKRDZ8L_Hm;J1AQ^EB%d7!6{V)(snIgD)sAO z8nv!IVw{@1GbJ-wJUa*xE7ox&Jtk8ofufcfCJv@0hm-GnYS1-T7cql)Bumva6947o z6?xb9u=%QnD(4RielG1NRRH_zdS~GssG_0L{qOC%{XsOA^y}5>IYi~}z4$e%A^@{o zS-f7#H$Oi1tn&wcbnHuc<4K>#~7RvOu~ ztt9a(y&L8Y 
z&8OY~GdrNClmR!pANbAz`8IqL@!;~|&V4p56?KV@Pw&$_W6o3e<%L9_cYcs6_<5Z0 zVCvy7_>yx)H+}yBJ;M1NkIg>q{l-1SW)c;tZq6&`r2BB*q=~?mobg+BSE|dFQHhJj z{+8p9yr55tG!0Xc4uo|U>Wz;goNIdLzwb_L{cHbo5hm#$^W8JXZ0Z253;m88zdAuY zNq*_niku(8tJW2&CbSHRFD?n4X;LS(U2xIjunY36{A?=5&GOtQsK_ka@w{2{uwIuo z%@MUVy9Sz8y~7Wee5uStg@q2mByDow%7u?Q0)YIGzN0O%DNRn8k%&{x>;*7D>;|%U zr(ybZgprL($1ooVacu(IUgcv?eGyG+EPBZKv(@~W=;(O2XjUtFH8T}{Hw(eLJb1v zkuygnoyZ;E#RWJ)8QPip7N!|Wt5$TvHc8$iOIimi=bo zj#0I_I#eY0Q*uvSkV8(#Ty42We*2+IXCNnB>>eDps4K3SN1>;qjpp=vILk?Y-Xv8J zj)paEAlVCr>T70eC1;&7w2aL-FOZs?kK9Aa<@Y)p(Z53$$v;9llz4c4%c2fr7savVd-!v>&@^Mk5WN|u5&b+B3xPD zBXfO!a~+RbhdpIc11d2~pb9+ z7t(3pSrTWSDXt`2Gr3r!U)29GOn$*e<0$8tgZDdZ$~;uPatDs4F$4?$OBZ2Wo4@~V zOn_-Cvgnx=TBRlF>>~nPBjm{Z5N?UUA6=Ig3@(R$IJgT3Yi9v6erUmDJry{g2m@}k z72m%~6V5!aq1+s4pGvM@T?df1RlVQG<>_)CDCogBU1A*xYal0x#_d{uO{eSiS6m5v zAsw9XC&CxT>RHWrt#k*Z&uWcT%`B`V6m=l2bsVB|~&#Sy`wj+J%) z;WpH9{W=)m!0i)F*Z*XAgwT;Lvudc8A>Eu;tvwy~@KcXB&84Enh*Scnlc}U>ZPvZ- zN&tBIT!e`H{&T>@N;eXnPv^0ov+Bbn*O@jutY2$-uLm#P40rk8mVPryEN0ayOt&Ai?RryRhtGK>Ti`tGEL?@Dr|C{PuG=o{@$vDGnt3p6 z$K4K=PKh;*WbYIkoW{JcJShiUN) zr`)3*8Hh&Su%xF7Yx>!mlN+JiX7CJF5Fc2Uwm_=Zhx|&#;Z1loCauC`f8yIshFkW9 zWo<$5jqj-|SVfF)XXWuCyPr>FAG*q_T=<<~*Ao)uDz~}TSE={N$J?7ctI@@`+O(nl zQ#0b~Zu)x&Pm~GX_c6>_2hUmSrD{D2lpp=PK^& zg^ALa&LtWrE}lZqSG&x)Uv>)#>XTPw*IMnZk1RGH8y&6uimR96;1Rs3yaLu6!)9Jd zlU`ol6dMW!F0VFHf-aQAlmGcN#-1o0>iB7kf8BHHUw;HX%8$?<{BOg-LSlqa^hstF z&4UxG?P6Dyw@uVho>Y74HG7%uy%3wc`$Xe}V~%>O%L?yVtH<3jJ+*{X{*9vRxzAeK zsb2i*I{UK9f=Mq|Xk`{;e5Ngw|38hk10cbGg2JNU`V5}d<6PTcOGz=k|0UP!8RF z<-D17cQV*=wbJHp<*f&zaHeGVyg*xvVYnWk@}d$w4)vVue!;n(tlQ4WOyXo=MplzJ zG|o-!Ft3c8I*si!$|x37MwCsR3*EDlV$f+Jq_3MYCWcKgQ;B~m1T%RsK0B&6dGBe@ z_JS}_@RI$L^SvF7w?qZ{NmF4SW!h7m9DKOo{4|sg)->bwtefTkuLJNEE=X_LJ4!$n zQ@lG7a2EXZ|4*!3w1e`K_h0`cN@4N@4wXSa-Rb9fbREP*WUdUpI=t#&#*uLetE^m? 
zqIW-}LDy-3C<#fA&!(CoV`TH#pcZ&B-6|c0rgxTIjR5ZpW*Y@R4`sTn+INnz)MC0QZX6^@qEK7$7zDH=(u%J+K109kv z3ljZ$vi^__U$g0tJ1bEtD@b6_CvjHH>DXSi+)Sq&Qe?^m!p5aJqE-)X=?F9n>IL^y z4&Mv8V6SxI%sz^&6=9J&d29ls>gRb3FP$Uz5EauGt~5{c**w`gOzaoL#qI%-I$Yq^ z;xJms4cyW!LhEYL7%OnJ-$=+u%BV;J5u+vE0+VHNGGbzI0xr*xI}iV9jkNA8>o3y; zMatCLNoC$nQ1dJ+$W~h;!_o~hr$lsAvm)ZPP)H#HJ7W1kb$s=bSPG%IpnRxZ^^CW! zSpy^R0R&5x7u0Z&7#drs0>lbiDv;k$pT(I9t_s&kFj_nI7fL3meb=Kfbd!FcFm+I~ zvXLwaOcZu~>sW{Dpg1I=ySC5*tcsg574Y-0>VxP_+FP3Ws`|&9Zv%c;{ErT$77Pg< zHx%F;^qv_Xf9X=~(=TuDbzK8LggZN)!yd%qj3k`sn zVRJNyTZIClmCc-1O!#JgoT6DyT;T$+6l$nU1#xitd-(F?J8573eTWJwh=uQLoe@V2T_*Z~aJ;MV?U7i5z&zavHNMYlQ`M>mk zr~g&wFh#1Il~;7x#r%EWTW2E!O_pf5L2v0?%*aEPm04iSwX}eip2S(nDGe%f=Sca#BB|mg?v?JcWzXQ+W!6X+BJm>U%I2dr6Tb2%uBb@deO5 zUqdiwJpp~^1p;-uP~u!S=v#aO;>CM%Iob`?jBDamhRLd(5P_QWqbv{9`6~`C=2`!N zm>H$J1rM)7*BZ0EcD7v;m7*Ko7O|o%a;%RX9Ul=I>}l&*%Axf~2gY09anc|={y zEpy`F3>KWd)q;}1ORPG8H*|gjWFpyWg5Ymt`^$BH$KJ85qf$#!;pTZ1w3%iNaWEGaQi2`y}DADtLo78mKYOq6XYa!8+# zUW`Pxy#b$-!sA;6?g0Ex{TQn;!epXtQE1eT4UajNqEmojuBGKKUUWDP8qHp zQZ!p&0PJhy(pq3lWtw<(8GM$DeTNeQ6KPvdHgrNG{NB{ypAKO;3g!EfpYtLBITOKqf-4kUV&Wn$I(--c98 z!xkfOz;TkZJl_b5ut_YN=Pb~3Qvil{u-2{cdceCmm%m{I?^^%g_nD$USXV-X`iU%!!OZCHBb92hhyk z2wi)y3zT1)E+Suxx3#D&6zutcn=39q9ABljThG5Z-~*mg_O}d_N$Qiq33tngaO!+iYmkn?$Bs z*JZ3ChtlZ^ZA9H(+_oIhitIuve0#~Q2IYWlpT*kLqc&4b;$(wYd4C{I?}STodZ-DJ z?El;!6UZo=i2&V3vQk`ETQ zU@vsU-iSCs-NcgS8SFli`|Q%d_ZxQy_FB++r(BPwz#lw}iwe?gUOCFwqyu_t&|1+E zHMEFw7EEz&)!zj7$L1+(Z^^{$0t;MjNVj`=Dd*gAg^Uq7hB>St!MhE#{Qo7^{nhAU zu;6*Dz9J`mC;4K7kL&l)Ld(xn>z2^88YFqvY9Jpn)IV)7x^d4)5@F5^eks&hMOQ(~ zHiUSiAsMSqy&qL#yAFwBKB3b5`z8tP6vCrc76E`NV(bMf>}jIKQiFSaDg zoCO!8I#?ni1}X=Jt}?_X0!H+)5WTQ)X$E4zWq*6hq;kIrOVniOi=b5B8(-KBU1fHP zu`{DUA8Q_FtFzW8aZntu@pf~(6k)Bf_RUF+ccTzAJ7}rC8VG4AvzZ|$7|7oLw;_rl z#>i?Utm*It6<;qOipNsHjbQN($RRQ^Y*v6uv_3xOTLsOW`Rg*%nl!cg$r3Z867RQn zw>WuhW&gUX+TJnJc{nEdbrqXo!d|}bL0S3sF>HWGn}pCogw31}BhA|g#dd>H z3X`7&?RD+6?_-Q%*iDXP^n^~ocO66p2M2raX^1At>MH577`S?z6bM&F;WE(QEP2O* zxvm5Ou(S;Ca|-&suK>Iqk^6$FzZR_kcCp 
zFed(&uO0HV@VMLsxcd)!Xk_Wid^;yC5Wrk{#S-hkqb8VaFek5Zb;RNa9$Y%FobYFE=@&)llv-Yae+(f`ifC9Q8# zU}75E9Bg$J>ya%Dd-;ebm0L~&vo3RDZ2oZ8emiMcIUScZ^)TW`ew6lBGt?`!)C1IC z`KE{7Yv?3btIH(BN(o_k8wU}KKQsHM?4-CXZ`Pdt?6larIeF-(p-OLOYf;BSl_}PuX zqDnD0-b~+yu1*np?j}KZxc!39MmH#;qQcXyuJDB$l=Wv#)0xrEd&(bYL+3lPi@*T zPPDRMwb6DV?@^xpw?S#at6!*fT+8!+u~ent|5dAr(jpEahFl|hgNLm@1qHi@t6)<^HVbbwV-QvlZVuWARu(hOeF!*4#1iBW<;dbdy20WPAXAP)zLM#YiVF>DAq3ZL zFm;hUOlH7aW^d0$^@qYVN^8oMtF&`MIF96|SbE=we-D_=0Vi)Eqw;}`TBojt8>j(Hw6X|s2u4~f2?;nBFLL0R$CV`9Prtw&MW^O z?aM_rcdS@vYAhJ-iBK_Fc?0yD4Dihn_HgJm*UAL0ecuFPWo+0IB7#=i9a}>9bLFk^ zg(e?gra>H^FoSZn5(?*2ujXri_8BLr`Rw((%00^+K}Ao ztoH&Q$kq*?V9|e6ObZ8=S+OQepNab89BEkZK-86R^5~0;rA#~~89})sHT-!z>7nA-EO?+qx!+1Syfuoy$oSMF z_MRyCpEr?nzR$!WjY^tN_X#glcsDz>jN0I4=p2=>$VVF%b!<0h8} zY8koHg+a~pI|pY6b+74oKWbFhlswcEf07qj8{6@job(pt(=8|Xvyp8z*fH%#an@aL z^AjC<4>T_RTCrj3A%r4H4vC|bEjW^qbI)=YR+N>%B5s#ohhF+=o#x@~VsL-}^S z7LJL(OBXqWz(v2a0VEb>nN{uyvSAae8%t1?r>DxDRb#=6Bn7~S&eB;rK%1w{*Td9= zref`QhIt$y#QE-;3tz3Ep53jXuWQw;Y@?vQwAA)60{ITPY>$YkLhOQkPM&SG*7*=c z*e*NyN1cQF-1b4J=^R0v8~1k8#!cf~b%w(8Zpe82HsZEMySi1RCz;2X2}bwD%XThJ zgc2VuEB?u!3rLgF;nRJ4avrtD{hf`t{4+xI(Nlp$2mXxP=myQSE!dnhO?b}!r%mS% zd>PZWQ?fjxYmB}QT!g7Niu8B$Op}f?sm}( zcd3hXeVB`nmmYi6nVU>F?59)g=R>vgUyJ@cEgZ!6>pXP1yKY0d^PJ6_Wl`N&!4pak z*RPxi)LB9Gq!3DsvOIN}xxqZ&s^>EHnTr-2Hkq3wjp|>pW-Smj?LKJ$?*Xew(AYUA zr0x?N6n5!e1XbOT*09TrG%(tP=C9lt**y6Wcsn_j=bXPW>Ee9(S< zf5I%TF>7@M#%eTj9!CgNW$nHn=3ER~2?=AjjRD~t+w5BN--a*$Fi`G9IUn7tm@r}P z{$|gqOU5bxY#iTZ6{*dZN5Z#ZJ?s~>_?ES0c~$z_A-cU0S(oa18wD(PLZ9NN))S9W^VNeg!>Xw!QW9 zBZ=1^b9Cp$3Xc`1OuM6vGi2rRSp6cQ9qHLV%epQ_5kB9~kI*8w&W_+D$TwD{NRN0P zNY~Jb(46w7HCT9RSy!$Q4ewUlpWVkb^$oF#(-*tl%E69$ELYoK{d%TiMmE%sD8qU? 
z5q1PwJ12;!l=z05&zvtUZF}Yuvg#sW`)f0jU}c%5n6$IU()RVBuPA(<&+c1jio}bA zTvhIiwt_T?%1Da_B=D)$3v~ao+Jg8AchRImKOmq^W%48CgD*nJ$-$C;^<5Y6wKs4y zooB;R3g1EXW{QV$04N7hP%eT|y%xLjQ2x{XMjanJQ@9kb`c7oB#0o520d)KbL#(Uk#YL8iA~Es8pxSQ+wQW0;J-Gs zo6@zc3MW)b?M}R^_{rxlx~{2iwN@nQJfVp;puKkz$3#w?ARvgh_c{{VCW2lTx!)+; z-STIX2gnMy)|>0t`LyatPx2AXky%UYgEx|_*m{Ao8IHE8qiLn!EW;KX z7V@GzDw`+;ReaZoLd_Bi=m9)kqCc=;q;)@>JjQlF>i-f7ZU=mW zwcELm+Ub^1JicEP^3aW7+7TUxREq67=6u=K_Lv^}Lh*brs@y?(8g~SlP@NNNjPg%) zZ1uXX2Yn2L`-P5BaWMrJKqy)%L#FCC9vJJcr8YT`cs4dr9URP0GVNoeuIK7_fCr^z z`jAX6G2fVyA~%+4$g3c4gT&fk6$^Os=~yQ7HJxH1S8FYCg;1%xK6jG->VEBu-ug zy%^dpW=O3pP5I|F61pE+|H`O#K1*3T+FHcYlXPWgG)-m!9^}|S^F8av!+F|Y=w*NN zebaZ-x7Z~Hsqpsr$HM(h9%VONLq4K-<_0F-U{>R}pP+Tq=6+yY{#KWUGuG#-bd&+F zA&Si=qf;!eU;q@WO^MD&k3<)Y?S=`D!=|#vwT;^F^$tep#-(P4_$YnBWL)d${aTf_ zKsEjBo$IGZuU$JU(j{*U_4tGt))ckRN**m6z}pb~3}(~7`8FfmDe+TgCO>%MzYP^2 z#%cHD@QzLA;2zSpf}{->jeDl{!AH7i-pGUzh3!tB|J=($w(1Sb(c3{Vca~e}+-jt! zVMUR^ z5k0><2*qiGh8BeY~IVs!?8ePf1T-3+$HFG++1Zb7f4%a?`(>lCa-Pdes zGv=apg97iWr0-Ox(^(}@?o403QCG-&iL<}z96W71Y9T~3Bal1qRwosts&lb!mm`%x zK%jF1VW58)N7?qY>hKCX-EX!ePxll1Q6#GwZ2Eom0%x@hrCNDjR1_ZnjjqT>#6ukRqke}DA|98p#41j|7?Z4W))E!N*44+aS z!Nk;0LR&8c=jgmW@U$VGI9iKo+hn+T7M%#F;DhsYi>7I_Tz$8WpG#|8dl}JXwz=Lz zvpOMOmR>PmR$Z>WQTy0*YgE6=C$xoaS4=5Ko)AFanw|eYrrtar%Ju&PuG6BDRFWmk zDV2&yC^Tm3^ev~vRLU9?iewMj=dOh8r_({iOeJI;`);Oelf;y5LJTt)V=OafF=ji@ zb$-wD$MY{VCy7D}x{9(!f~tH$qCn)Drbs zboq{$qdv+B%hLBdqeKE0uG9VpV)JllKtfJF3!y3AXRmZYpw`#<8|%#LO~i_Aa4%}o zIA>QW4XL>R8*`F&?)?Shg~MMUdW0!|Z8$9EF(+H$E1ifV--2o@Uu7x>&cSwMs>OIq zgWLs=JC3c6KB&t&`*M-iSadpJ&+YQ~PqW6S6-&@8$hiyNH|K{eBbJmQhs)=%CCgHk zByU%hy0re#-Ce&LR!J2RJ&2~I&&cTmyK#EmKeOP$l(Z|Fkd0znlxzCi*WWKaIt9-C z!7pEuZ6Zau$cCut#&@rzc80h?E@SYmGKX)P138p#ACBZx>}tUh?7jGRk%|@Tw%d}k zpkLOkPa>-~Jl%V2Hq*4Jdpo(vl{o#Ahd)MXRsVg8G{iPq0Oo!?-qQH8nz5R{$01W$ zfGsHi!vM)&hbj%CRpyGa4sDox4Gu^8ML2&x^M0$Jz5EHxWZj~8fkIzfh;p=ikx?1C zPuJal&8aR$KNR3bxee; zZZ;C%k?p6L4MQ(u*VX>heF%4*?&VNd*JcM%tYH4mhE6^FsdSE?BX4|J3Y7}ab(d^K 
zaDe-#8Hs4~LA1ll%(LN@CThxp^LYWOUsna~9qT)EYE4c<6xKu5lFU$gk+q6T%G1!( zW(_md(;Q_GnOboAFTB;8KksjM7*Sx2eb$!XGwd4K)^<_B6UB;dIg zOdKPjj$uxWoFvjR*b5U-XZs=-h~JE!LA0xia&jAD(}a>!oT4O?bafwf;GgUfCg^kL zUubH?A?H2d%qNeRo`R9CcR0CQc9=8Q6iSEfe;B-A&R>(n?8GgKtrSIDH}dJQenPSI z6{sQSs4q@;aL&@U;C{9qydV7UhJF3XK{#Qt$p&afXE1eV7;7;GY6(C0yfN(a%{g8W(Irj3WLaS`Xm(WkW#+A_6l!1N@N;c^P(pbXN6F3^WSWk^} zi49`8Va)&GO2BhcN>N_NXBA%(DdKr1wqL(USJov$fR19bvQ|V8AKXk2eOrb_?!y58 zSikA`E6B(o=DG(GOA@!xe&<_rgPVCowlA0X3AjSZ)gx7Lf0OHD=_ADFR5K> zYN_+`bIEKNXD|KIc1kH;g?gvjb|=DLzXx+q6xbWBi#{uqw+{3QpmRmU1tT1aGnaQ* zm$XdMqGrvCZ0`*+lF&N+0F+;VefI*hA?sV|H8X=Kgm98 Oy$BhxYbLSw~-2gy;(PTTlK#i4#44krn+i3 z9W=VmX0!Y6y}8-p8SZ>G?w_)y_`ZFZa+4?>??*MmalV>xgj-xU{AO>kufCFe4*Za) zUU$2U5(h|p;g>?go_99z^PsW7##TGc7^mCxMCjMq7m|zb7IOL~^9!B2yuE)v_wJLL z=Jwx%(s0<|>xIQ0p(@LTO55*B#CrxSu6%(#?ZKxaG@_%~q%Os0n`n=pK87d%yJ4q% z+V=jJP+ACi?dgU`!Ti2^(OZPsL80E|^XmU}Kd61A|F+Rv#E{*ISv3--1OtBCKQ(MF z;**=vf53uc;qfsqNF*;lRic`QHe#}FB~Pxe_C6O5XA zPQ12E@F(9IN%tzTM}-J{ZcWZLBKCaq(vQA+OUWu~t+gVro;f0|w-|TiY>hj#4zUU} zb_Y??7^*9>4I|q*c5&`T)xVnuf_WSWs!Mkd=JImBQBVvkgLO?Ldez3~&5+5J|4dAa z@DP{8L|=S9HGk!1KHRFoC4`90=oq8%YVg^QBR+K}%Qn31oA!ZU26~OcsmH^f*4up@`uD)xYfq_CgLTcpYf8|EP zxZ@laWiY2nu>E~{N(8dE!jKGcSNI_hZ98%-<((RcJO1;^Jq@ewP$o_b`DDr;6{Agv;6c z-IA9!fdMQM4J1Zw-)Wi7zE+BK0R8hBxT0&o62WPH*uXIO>7oTD7GRV)qO+Fl8C#Ol zI{Np8^KdV&21c8S*6xg`yQXj?{#ynM`fk&_?p;w7m+ciEt*m_J-wh!$r}f8YWw3e9 zwW|2!rwis-R5bS~Zy_d#AD@s<(yVn;)asz-@>Id`T9*VrIXy1S>aT8S%1}g~*c8U^ z-zJbKL_W@=9cVGVws%~%_B3`-jU26YVFr3BneH;-sAV24?YJcIhK9!mU>HK6$y|v(5s-+G5mIHq+_V=A@HjMRp!MAW-#BB2<$sxekXaPuprP8$in-Wl)1Dr?{oQdp2jao&=GuN-uV#@2pu-ZKUW(Y8h;u9n&s5wdAEmYS}%<<{X!O60gu|uJzyeK?|}QIPue0e~(+>YiGAg zN>)NwtA(XPY{84q!vg+C=$k>s@`OatM*W*{?EMGeh%JkR;0zOy_%S!&{x1n*vXmVi z+q))3iugw?|KFS~QY)wonTAohGMtR~?Z=ML6IQRtF2<7Wo$jZEPKck_<#|IbNBTNo zb8TWdWo8=QDZDiFD3WhBwUgU#cqa~Vyzsef`C6J3lTXD(E@r5l@ZYx(rTf^LN-3HQ z{~0}&3b(KAcEeO%ZulPvgyJGE5Fem^wsgZb%M>Mw(s!+9v^FIgb&_Mu%)Ej6UH9-%AlQq?(_jY@!DS3Ub2yW+D&=p>VwzRw)&(|QPm 
zc)qEvxxWPs{*q!Aq^SVvD_5HGBPGk^qeIwj0Qwn~r?b4B3`o>CK@R>WUhh!1F)o=X zZ;}b)SV`=JfmbN4dM$qlbHUo$R+M~sM_o~>>?gVgQ3cQq>+0`I(UvNmsOQigD*JT`EC%-;D2k>)nNvR@cI+iFP5gE_FP+--K=7s8{8?Xd0u zy=n4y@U>BXRA#5f(t5C@VWSQX*EO@9rSAD(b1Qopx}00pE9G|&CJGja;!;s{JF$dT z7jEU2C##EV8JCZD;&(PAAui?E>ZKlMtk|9cmr-cFFPqUBW70->TK@#lI9a&9w4jEPwZ+TCn)Z0#FB^ zU5^ft7R(MvF40=NW&0(%&0S>45$wU*jL;Yl6eC^Ud$T9TX4m65?bJ`t5KyvdN1RYc zr<3u^Id?W<|BVahXf=9QJC0iw9*}=rkkn;`E}ahi(Mb#1U!BHi`fJ9F5cQ%cwl}b` z%+GFOGI>Ys2&myZTIw+m!{tA@nh3LKC(nJJ{EgjRL&GaSq#PQz`Vr!1x}cKvJZ;-` z%f4LxfpC(Vnhdnu_W-hRs9+^-`Um27*)V0d+AMNxKW=-i&nv<0&7}sCBj)6Ds2xzH ztz6cYq)cV;EAa{07vx6dJ-ot&50Yc-j1+a=_lQVFx{164UgBKFGd0Ja-WtB~F<)Em zcYBKYUxT46xtg{_&ljA*%~3mH6=sa@fJ${dI!gN1tlulKyaTi6ZjhtRd5&p{zEO(d zNlS~aZYFZ^WBKV}3Jd9wd6F2V@S@{vs1)`Yv41KYQhP$;D8QB;4yl6=PnC~vo*awu zoP69&$gBfXYNgA@fD;<~QFGBw~;6T!~coGp{Yu`G%Oju941? zMN(?QF)~ftFY&JC5Uv-2!)t`6X!;^K#SoezF`FGJR{uLVf&X(YU9J~ieQ_>5h+4Eh zk8~1U>{xTX704q`%YZ%#eS{gtb&}1&P+YJH@cSZ_aW7U5{k_*yl1`VD&IS7tH7ym% z@o?ct$4EKZL43f92(~U;MHXmSTzv+N+b}Sr9ukqqlK^`B3{jD~V1EPb?DX%zl>u_7?o`Y#ObFZAu)B`|V^Rk-p^mY( znv2AQHN>sS*U}h87N15yeqWhOh7owYm33K@1cWr{lvtC6iX43H^qs2Xo!H{0bU&-qTYDJ8g*0ODO6|77oqk+!v!dP}7_RYWz0OfSO z0y!>?0xU!D%D+ZSuS&?-WYgF^uicN|ogWbFd$5JEa^X2-(+cjfC2=6+yhdhvHo0e!O; z$j*F=pGMJ&+VLuYBTavl~zCnp$QERx)1cNLBKRRI&3qu5gslIo4i8+rJ7NFE+K z?Ygt;uBm}ZpRz_BtuotHWgjqyd(>)or0k4}{G_n#HYGpuj!1&ghhNDWh}V``?mJcA z4#2mYOY@LoSSBF^6LL{WL#EA^xscoLMi6cQbBDGz3&_=MuFa?uqcPm7UG`8*le49u zJNqM3#r|=aU6vZ`9zpIM$>zL{nyDJU;=uJQIiA8HVy!b@OM_`rx@iBN+K15~YQQp9otH9A-$fI*t+5$q) z@{rS{>2z>#2!EVaj&fUYTcI|n!UK__pLol`mFea`J=4?w>myiChNZiENmh=TkumOOfQ05;#ZIr zfwfITsgmzn;_qNlu#3)ngejfxf3qMeqWoVAMD~3m%M`qErQUMU+5NG|Da)b_gJaU>b`Gu?2Yiae*m^Zfu{lMOzNXP^HnqUA^IDwk`qJHwgu>KO}pzyS* zCp#hSBve~Y==1K*z;mQk1=5YO^L!Job?#pi{$IK$x9O%KMbu1%DBLHa+PrM<$Wb;d z<(=5FgQ{_dyM-kA%;!Fp7GvAkP!^uV*}tb9IUasZqjWC9Zm$FIwo%}CHr+yxxGGr( zskSPmR5^M%#{_JD-%m_;ukAmM9fPP$oZC0`b?rus>FE)xhU5Z62q zqf?Ck&QnoAX$SUPggwho?znyXqx+$%vm+xhE`q3ukQzHXL^syWrq%C 
z_^7k7Vqj2)AmX9IOdJMIfI0xhiADKX@yNp58|=}x>mCviQa$TZ-j;Hvo)eK@MUKYY z36(@~9B2f*O{#eyrLIMG*1VEmdaE-E)TT$r=vZeQ^6MM31*9GTueJDfL51Eq0$f<( z!sEaCe1iQ1KBdlShJRmHM9L~MFwWw0&JWyBN2%n6a~xUBE<_VvTew6Ino>*{Yn$2I zI4t?&-wmT9JCpm*{T0My&2O)Wu8u!v@~u);L*e-KZg)cm-!r>@QLL}%Bo2%?+`}HJ zU2eKvH%{pu>RtV&Z=jY^D$Y^8W2dVE%42<0m^6TkG;Iz2;C+&A;ZNyUv$cNf-LO^u zTyo0lY0HHR0Fm$P!sdQH*6|l$ z=4Rs;!_CBch&}zcM_`H?r~hf_tgY+pDe&o;SdUqgxt6LMvu=0)kyTlaR_5bN{@w81 zz}H{~5N7^x)NiGti&n`{cTL6pC7}lrhoo6ty7>Py2>$SYdfCamZQWKBk(Yw+?W;d# zES#r^&jJufqn+6X&v$zcFm4Z4geq&684qkftki#NPg;{1D72B*>Z*6+w;c*NM3rj& zRCL89@-|DL)+xuMDii)d_~Uw9 z>ZOvHJX&T#k9!>86$SeVUA~qj|8)A-AiNtQc)iA#gD&ZOldHX~7q|$3atftaSj?8a zLtN7`YtM!BWWPtn7=a{DeUFgpX->@ot~=Eg_*bE25dw03Yshb z?}qC|SEj8kzqaOD%RV?CfLtNh$=4NGG`Z(+T@Ly)MYfxxvCX~BEy4#<1whrH$%e}Y zwbS!aXa#t$od&S01tuS<1gv>XIdWiU-n*c+pL4R*GWdR9D`3n6?of&(+4`vRrUp$~yTi>~17xqU*27=AW8JRp1cTQ#-ZqtTpj#J#$Vf-b)gIkmLUMikUTJbBQ!g?3FtW0e&mK=m4} z5j3Or_xMer5?3$YrzH}9@KF0a7BjUDecrTajx|$%cUa-qy}x$t(p@1g9ys^L>PzT3 zQ_pz%e^hYI79I-+q1%bsGg&>aVR=`2>KC?k2U@<}yepRjkP$6u9^|{-w`>W%g@d}) zbf$j6uNJlc&e?3ih0p4q`6QrP>iMEQ$HVtXZnN#GZ-*y5eIS+#P5f4^cXBW}4X>PW zc%=ZZ*PzHIMr|M2BW?JM{E({Q36uYfJ?g1aDr04EF6;W*QE#_K3ec;s# zN3W|xbx@sqIc5N=**v*JY@Y%mjZCPqo~XKqc5<>mv@{lM!p#P^oA%5H?vm5y%Kl7= zKK9AtycvT68_bOqK^KV+2kY|ouUQjU@wEZFnT2Dl0WNCfTrT}BolCEP_9INA^@j*b z;gc-uP5nPsDir#Yi@jt>y+#3NI_hpsRY9&Zh9rPPpVdT?a(2)m*=>F=&%!A?!-0wl0~U+xQp^R zFn@rd=DqKwg*RmZyG%CZBqe2gSBPQXsP_M~38Br0WH6FwxEGqZk}HET@OZ7fg*=C{ zBP*^_n-arWlxRKX%gEn1)-l>Rh7d~KF9nUN^5wiaF`fZ7xeA2!S&5<(f@dbubbWyw zJcM9LQp19}WEvO^MUE(Fq?LNP8TlXqfW3G0t0QI6kfF?kWyf9z{|g21-i5XG)`_pF z_ss+izs8%zY8 zzT;MrJs_WFq&W{JJ0f~+6zOtLz)Ao>4Io z#Teo-kc_%02aVfTE@x<>Qu|L!khP+Ie%eiXPY&G>qm>(K(F98$7Q`r=*OAmEtxxP1Wg>~C4DQBb98vug-_G}G?*&*o0^j6zQulpwUKhSY2~K1 zL1b6m917pI=&<=j>>Xzxb%lOhv4cw}D31e%}^~|2wqN z?a6udyPLF!=w`~?;unO;gx@vN9>s`4bc&{VV#WN1=l`@Svq>kWchqH`vnYMh<(Z4h zc~Fqj$4PBk?KxslGJUGi_o&jg0zn{ok1uj02bmQ82E`P zCrtv5Ax<}RKCScZaoM;nNmFT~GV)Qa~hOYhU 
z%Vu@i5@x4lBOCw8AWmzC(0(czd7`7;+y>_p6lgCys!kwM9EL+R0$d9K$8{M9@>IljUh%7t zCe-(G{sRrKwDWRlZDc0%<(M=QHqsAhl8QUAHP< z6FuPiI(se^mV1_sRYy9W!N)W>t0d4*nEKxAxYbSz+UHcC#H6lN*QYJ_8PRb{t}ZS% z3PZTo=dycJOfImDvR|F%>^D;Bf>eJ)&P7s|k*gmSVHM~0jMqUovG90De1(2FX?aQw zdE;;7_3eCkh_a+6yc=C7`5z}bUzT7Iz*T=egCmIm33vYN54A!e;rg1lUd^#NG*2U}CW!Y}o=W>KrJgHM=%s5`cGaT7*Ur3!NgSIopAHPd7%~IDxj4;EZE;kG zdRpF_*a68yFKqDOx)xu2frc6_rKSF=L^C5TGOK)%1DCq61@RXB8%d^`HA%8CF zA}1m0!w9-78>M-Fl)Q7gvTo%5U(<-X_|EC3EU~_}a$K{BV4#rbSD2BRdh*P<=tF&t z@eNyt3qVJ73-7!s$DIi5>RYwD(16nGfG%m(H`tyuCTJgB2(M$rN&KePX$hSK%^}{) zpujEVof3SIa7wgB^2crz--`Chnoel9a2B6ozUQbUq9oc*$E=%FoD$cY;9A?eF&P4K z7bqR<+@DU4^I=%OvY9s@_5FfUJT_?&Ud!qv49pmHzdziOgTBe0h#T%43wwT&SmlQS z?Gh41tIlA|$h->JZ>E+9Xgd4uhnmLfcDRk)?tA0;^V->w{hHrD(<*!AHBck&>zKNf zpt+m)7dNltq6~hRj;Gb^Qf0H0b3KPDYmIV1az&G?vWxS7rc?xM0_8|)r_q|8-J~l)*)9o}B;-#|4my(^%9pOrn%9m0I+sRAN1HP%6XuNMql<^fw*a1{>cDbE8o0SGu6b z;d$M!0~Cb@#{IfT>Wy|OT0S9)R`sYguwfpb7;@mpd|n25I!$hR?3Z`fKb6DyMLQjq zW8v}8_VSQr0s0N%NAgry2YssT>}PdGe64j@hv;&a^o^h2M<>oxAZ*h~+j@=xga}(C zt^iuUD?HG(r>|IYoRS{T*JFs~J1v13{^$P71w9&J+Ohp+_+gZiQ_o)OeS4M<$9DfI zN73Ac!dq3hb@|oC9cdV}R^G;&< zT~6rCdI3N8=)qb-^~{Q^uEL9P={sX6-0PR(=iCcVK1~JRN#6eAo$ncy?TMmlQNK!5 z)Pz#KxNxmLfvd@J7AOfR?BNxC6V&E~SKgH_ez8v9PZAr9ubls^^Ehpn8ObL9nc*w6 zIxxba)ONq>tAF$omizXT=v8j6G-3tVJ89*G%4G=-KGmD?XdrCc;89fBtxLun!t}v5 z5p>3C(R&E$;U2eI8c7lxU%l?9c+0<`IfMK-GmBM{DX9*+#x5F+QhJ^TH&73(8_$?D zI@CFdl10a$^|~64ghq`U|0v}*%kIzv-s5)BN^fH?rioJhDu3Rw$*N}ZecW=EWZ$q8 ziZplHh>y47Y2))63U6ollIAIKZlvwsSKNI+>sZ7Z9^Umm99j+M7DL5nB(LkKPDRS#qoXW_Qr+5v60wurV_yLpt&S^laf-pZ)cx%VK;HT3Hkf zUbe*d1mVohSAV+L=}@GZYxNcL67N{sw&$~lRhNzW5k(b`EH|YWZ>@msUFeE_8M+!f z7$neO#~4ve8F&)_`+{NyG(Rc0?f0>YFOUinpJ~OTti?H2rU_9#0ny%jY*3N6T&KYT0) zJRM#N`GO_1eYg&YM;H4F)!?Z{^sHnP`Mcq`t2bZEr6LtV8XWkMUB@Ju_DWyyc6}+>;_}XF{i2V?!c4 z(vACTAifzW*4)vP*dyiq1xz>ap^&!}J9(>U1-vL=8zlQx%8l}o+r2X#SY*i+!Z*lO zcRof*Cf~<7I@E0fJ;QEk(j2#;nDoN=42DX$4q}do(~r_sBukLx@Boq>jC}}sZLjX? 
zFUox1lzy;|o>!dYoE2F=&FIAJGDYBec1H$pWK4jdD$rJ7-}4uXzr2Wzl+b{Z;Q!_JU-v+K6>6(Qs#1c$UgsQDRJf&oLVF=>BWlPNCI zuVRhhrNfL#znPGd>v8jzv_aqki3{Op@*Zbpt$V+dI?bcmF64xgl;(ubn*O$bS$WHh z2G54d$(bMg-Q$`06z8RD(L3mK2+_;$>cSQZVZQ|RoubOU@ZDt7%OB|{V$R)_nc`>k zRSVowe_prj9csW^tedJ$MQ(D%Pp@n}2_z8F7igKosc+_MK5rXX|CYPkJm|A1<#Ath zMJVsE$o!9#63Q2&)&u2<9n+u-fp8P^O7_>!%fR8oA#=!$3WnFRI~n7c($w1*T@?-Y8eky`xV4W zYSN)+lVaM|g6ZVqTk#2dNZ;F1Mo*-L!?T4;s0P z6{+u7te04+WwbldhzxmUCDgs8%5xgj?>Q8edAx4LBigxWoBQxH_DMbkvYKzTzgWGJ zKk*3}S8;Zj9;A5dQa*8{#dQfc9I(_T{^J39i-EpGd)i1b5u<}1k13@5uK~yp%rVFr zozmE6rZWZ}lo8_Ax)@tW&e4XC;icV?ecgEcLm1PD`fDPrXVGvW{6Ee}9|0!Jqgg9C z$|mY4o>*`w`>|~Ec8=|`_B-Qm(@n9Irf%I~1T5N?U4}U`U;;~4x9D$}dhOrWpI;b` znPJX}D`Fz%gO@1EVy}(i#}#)}8x+s3^>dY<2N;t-PSfN1g5m$O4cPv?y{F)LyVFqDFXA@>H|cAC znZd$t;dr-s$OufqSll}1bjM44u?v8T(hXAX-d%h+OTd12=NWaswvY9-gXJ7q z9A5;&0Pz|^l-LE+D?2r?xEO85*>sT|?G#hlP|IF%UxNq=rFE(IB%A09+wkcB?$UY&ZR&VEB0x!A+>Y|<&U#~|1| z^PbhAKEo6HY@sF9EDhZ>9hF3|pPtw&yDfJwomsK@Q{dU{_Z0U$Ay;dJ-g$%`%jSM# z6L0WC(!f&nO>M3F(eRI3)r-T6VaNSu!KW3+&W!9BQeTGTXu)90OQCA(kgVk zt46cfHCq)DiUBL^QX};VW@N+f^g>fjag2}Pk2TqjO=80b`zhXX$t%$p;YUB2#3jU= z9E&eXQxrsu?ApCE&G|Q?sw9A;^r>3%CkNXwMQy-8hhHpSFptj;(EE*Yy;e4I_>I!D zPig0GnjQ@A*A=OJN_mrdT2h4g5*fft(jv=FUyYG5j4R5wds+g#=>2ZD!AxP7;M$uaOMMu@~4Pr{~ zEpRbso%L=YEpmIwyb)o0NmeT1F(hVW<$_tU3@44$(&#>7rp9B6#Mse_06*0>1Bf-W zTx4@I3yz%q1mYO5u(MJy+A1xCc4D;nHh;GM>>S|Af~kdDp`KD@SCp<4h3*BUiXMsL zz6&dZeznWq`i{|2(FqR(FRi5SOsJBUG3ke>5I%i9RPs>%MHzk>&U#J!VI<2~!7*|@ z-mZN-oYf;I4SU6uMbd%Mpq(!^RDuQYT5$t?d>%qV{muC{H;ZpJ)cN=rQ!o7p3vgL8 z5_OsWD2R_U2yg~b@9B8R12*P!Lnu$f?Uon~d*+Bg$>0s*GueKjp>NJvT!dRQv4KH7 z5^K!M2P>Sd3ko)C=bwob&dmI~p&-wAC-6JO6+$~Cdx=%Cwn^R|Em6mp(#JxkOpnfM zMQH^6;#v5tmCClSi_brxs$tX=e`C4pLR!KdKgSlX|B3MPLe1zGy?om85}S{;0D~dU zc*3dCGmfvr(=%tN=&oT$Po<oow)#>0n)|th?`@O+$?9oF8kMd)!>#E}s_~6r; z6G#Wyu#fUzQlGuR$KFOsK{lB56$r{{IPsB{j3pT<`UIfqiF((F8^=1_-gRuNAyk2} z_v<3TUjXTen|znH-Vb$bs`n{v%vy_1Z=+T~9mw>y$SsWLPp|y}kzwSbK_1t_fYgQOd?JpgsZy^2pvy>rq8H3`hF9!}1M 
zOM}*9rIAtm70X!ofr#Q?7w5XpN@7AMNVYQQPB7Z;XY|gbx$#FmGtbVQn#VNqHG@QR z1SrvHG5tu_&HThJErbrTclF}{7OApmK`8n{(IifqSRZ=R_kT7~gHLpqyo7Q-GZANg zSnJ_#NP_@QNT%PJEMgyJUE|(&v4G>2Q@l25He)>4cqMsKtD)`MsV>Z#2(Yzg^1Gu= z5{rAJrToVN>kriTO`Ni3vOsxG|J`XI!9mDgYa+ieg|~IVC{tZfB}>d_ep?C{nVcL) zl7SDYRF^qQ*mttDFKBja)?(vC&@(xUCp=k`1*^}*^xy!czbe@({&I8o2Yrhe=ps|X z6`1))bqE*!DYD$iUvy1uNUT@!@+proh;e8N93HGO4Lef8>`-}~Vil|M&UhcVM_R;(JbFQlqnCZ2c~{t^qemM zX!hO27*piej=e(E@Yaajwcaf^>q_zP2n!8>JNYq2_4}=BW%+RN6aXIMeL797TAErt z{7P7vSnGb=M<)IHvrW0F=fIA?E@`G@zoZ+ge(OEe*)^R9TWr1AIc*1&`CUG7e&^;n zn){1a-U;BEf7y1IMdpR8!BskMkxD+Zibh-6ujshXX%fD zDEYdWn|^s)6CZL&X~+;mKIsBqZEeOruyUu{cGFL48-fRi7+lq|(%ZkX(>H7!Kg`7< zrO_d#6+YcqSHmkw6Sw=saUN1VgD)slQuTF#Ppj;6O#7b+-y3z^}NQjA~=sJ zwYCh>zI3cx&Z14Y81!;JLQ|J4Q+}wPo@gA=>Q;Kj-z53j$*mFfm7BfBQ}#wZvS9TR zeOAVtKM||Xenu%v?r~-+tC@i&#Q0jH8#qo(;c-mrwAc1z+nMSBUFCmKiUW^Rsl1(e@uF3ho$JS$+HFZKX#7 zMhrP6w*$|%)0~8M#|;GkU>h8~LdHfZ(^Tp1aTCdt|vHhiTnwxZp7hXe?EQ>Yb--hP~sE$ zd~S8cen^#}Gfu^x+W6_WHeErSkk649WDCn__!RWsx}&WeSmN^P$d_${ygVd~b8LY3 z<7MiepeKey9wxLK?Hy}1Av!D|Lx3~z-@OW;?_oc1^Wwk5H+>qaUPx9*qq`n-h(@b> z^~88OVu+X1z20MpxFjtnvybUVw3xWjx?v&rB>(a<&AYoYVxU zBfAi>lhxF>psHQ8?J-!MV}fl5Fc*kl=oaD;g)TtkD5XkxOTa5rec$Y{RsA&Z6C{f3 zgwxLUI6j!+LUb%6&zn(SmXFtrE3aXX!IF~7k8XEAlP46wD3o*$4 zH?FN7>OnIJ0VvcJ{{AVe)o>@ukYNRF^P>IT+5c zV;jU14m@-{IueT9Bs_bAvz0gUG^{+tep=cxV&frNh| zzBC1Y*xQ*~9%O=&IYGGPk*c7~SED|I4eMCDqkC_gO*>JM%m{^Z)i%L@o5>X~@evax z7Lw_#&Dals3wQ(Ek(bBF8*QSdQamJ#w_jFVd)dKLDqZAs?BR3(gzk{2lnC>0zQ&Jj ziOaY0@K!<9FPLYZ@s>~VI=sPjQ@N{-a_lEJV9bKiRe1XO?puy(nXAEW00rr5nFAGd zXPdU_gildsIr@!aT(<{jU**r8`nsCQVD&cskK-gdytJ-`+~@AN-OwnM6}K6?PDZ0_ zPU$WvK0#K2M~5D>b!w;+hEB`Z|CmKGwX}!ByhWP0JL{sREs=um7$bB&wFWKOLm+)# z^=_t;vX@%j$~L(@!tNdPx0D<@W0{+3b(18q=Jem%(DtXM&zjNaNqIKqNo}%%e24mx zw75cVc1-2~#kzj~in-loX_q&Dok3aCTL1}D;^!D8AieBR`sJ2Xh z5A{C3%occvw=ZG>WZe+^8uS!a)8F9rG5W$_8Aexft?J1g=z-6f_uvUfBHsP|iK%`b zX6Uk1TdDIt%@kH`x2kHVdUy8)4cPoh_g4D+29jnIi_(bCuytK+^*^fQ4y%>EAs+VP zX=T%~{FSrnU76YU)+mqW$hm!AW`Iqc*WfGk>`F3-+d>@idVPnUVwBr^ri;=69BBIi 
z+}&Qk%x;7qsz0#fRn#Sm@`?oB(yq;4P0^cPO;umRD?H7A(e6eZ4a6;>mz*u4TKN3& z0ILRq?;>3*kK}4$W@;hio0flQmQq(t=~M|PotwnTLy~g#zmG2UAx<*Xa(-dkBEj@& zC6|Z)BS{?M06u=2-1jF@QfMPO-c!vw+IUF!d&q$(WBx*2eZzXL>93MHHK%&z@0du7 zv+KUlfykI|rwZ$s%gQgKt4W{^2CX zpa?rhC}UGoYpF`(9cQx7j%|aFY5X@_xwwR0k@>Z+$6GpM>kP{7PW~+TcS8%BClBOV znBc!Tm*rk)%Set<0irE~U;$JXefa;pSzE~R3$L7h{b4z@#sWaRGr>pBvXYxmvN8L2 zgBcP($YhRbJ~zA~E2OHD7-V8Wbea*OmpLdfDUOOk;#uEBD~n@%?f%HixUxQ7NYuh- ztb#;clSyxit<_-*LzzYJD?oNN`8@6oaeJv?c#No9z$y($n8<0FGyS}XY+ zJQ9qT3u9P6cotrZ)e)wkFj9!Z5d7KnJ(i$^;#Z*#lb_+3k$~C=>R&PskTn@$B3VPF zH6|JqS`tGNUqSY%u@et(eVUiZq{k7VT}fyuucCewR@ewOL0>`BW1KPlB~ z+xj1TEAlVF+KMPlG}p3SAAdA?&Gxa(3*gQx>zwiQ0b0CnEOlLcFVaJjwA6~9kg*0O z-0vYGEg!BVY=6)t-?WM=iG7B*SI7R{z}ts`UbN2|_8)hDFbzf9HdN+#BL@h?8H^lq z+NB<+GU~CKz*tFxq|xcj&hg})dnB78utR`T7o6f(Fg7u9MKK5Yg@~2%*Mae(}Kp%AN>2OxPsr3*r{ zQ9AKlRLM|*IkFp>lP)~5CQ*7kNW&(UMf(Yjrh5-4zHqWA9){b#V~!Ml3x8#U)R(Gsd zTvPoD5;Ses$t`t;R&yJ!0UG?eKx$FquuSMVcq*MMgQTtvhck_|4)<6fh+tnHi7-hI z{%;*UCjK}95z-F>$~3vh@E>|Y@lxd^*pJuqQp-nQBIW_(GH84~N_}F0f5rDOU22c2 z5y@P0Lzi5myo`UaEv4Yuj%c2XLDaQr(FdZ(|t-ts}&CqqA?# z7C0%##65&*PwE-G%W%s<@+}M{q8KDudB8Fh@bQtT4P7T;$cS+>-DTh$mdiZ|24zaAZxQ`n<7q zq>H{l;mmS?FzeTulm6z(sKA(J56w1KYC{$5_Sw)Z2xfGN59V$IF zRv=dW^kYZQdmQA_>s#2Lj=?bj3YvRct_mo_J;4U0Bci&8#Klm4n2dl9D=rs^ThRO_ z%{HimPsW&;6lqE*=%y^9F!HgaO*hTjb5kD$W`je!PLRt_VrzCb$Z1>@$sk4}za$fV zql^5r;CAR02|-3dET?H_qPrx!h@E?^m0ML_TEE5&{{rsZQljD@O>s=A8%0Z3cX*&9 zvuHKgsxRmBrYctW_!CdW6~GcrgnrQ$C9b`vR`1q{Q5NS#A*4aR$@*UnHz%nNotCJ5 zC;2}b`D#5DmTtH~9l0c_PW$3Jlu}Zk&p-=Ma=8Jh>TRxj9VIug#A(f|H)!(yB3pQe zkcwVgDFMgX&;RZ6X}SvqZ?dB>s$>RPn0~{N5YU2wq>(vSqJbI(*888d(1wO)lh$S7 ze9}W{VXBtHipQ$2=OGhVqwPPlJ^aJ6!kU%2)w^_Ipyf4D?61(7v?JnxD30=S z&eWwKJySKrh2KA2;2jR5ZwP*7L1La=_d+2<5)h z=|ttSO7541B)5>;T((t0x$TrtZmT1fT$Y$ySguQAWtjUg%*`#JEy|DJu%NJ#rlKR-X?yz+e2#jGQuo|=S5 z6QO@hy~t{19Ls154P)GM*J-mjepdx}h9caf6jhUjlMZjD8+^^qoi24#O3F-iz34yf z!R9Bf%y{QEX&vVrN^er7D_at@%JE8E8|vX+`xsCEMD(h&yW`&JT7*-Iu<|szW;#rV zW#m>??!=8H*3eR$VCFC5eb_mVElmA~4xmS%9G8X9{?gM2QF?&sNz>qKHyKy@{lx6s 
z-WQ{3yoOIvM(;2zYrc4kZ~G3xYVCJ#0k6*vbP(T)^ z`Kge$sz8CEi3~bm!m7331>r`@9A9jaPZ?X{DgU-bQZ)V9O~V>BTi|9pzT?O@uO%ZW z&Z$M4y+HUGslCgonsWeC?j|V#cpi*Jszhh&3&10CfNg>N+K|kPI<`a2QWj7~-b{z~ zIosGcaI{0X@h4B8Tulr%Y;vcA)oDLX7O@Vr0sB6S&&9O5lxQV{y*|kn;J1@h0VuA! z_VQZ8UFCS~v%teM#nw;Y>6FlWF0&Kq1cIQvbZhU{tOa*aT|oN=uLFXorY|J$Tl>k~ zL}L5u8C))7AiTfX-eJA6W<3s)rv$!E4rqB*_9^AW?|~a;G6=O-GY4h6XR`nH3Y13!)uWSA~l@U ze7&_RJS}S0uh1gvo|{8z9TmKy*d9E2x{BXat!&wR5dXi7_8Cvj-Ot-ENQeFh>24K$)xW|!1F5{;d#_wcXb$}Ds5X9YoQK&xG zjk&}*yGPM43?m3rQ#yz>*Uz7Qk)}A+_U8c`#*bv;JL%es3_FW_vwY#40LpyY|I8Uw5?XR4vad!8qM&#>=n#LZs zd(k@A`5|;tH_R)UuxW{DoKn!pK*hZFEHEbZpKY zIuHS`rY>uKh&L_lX`4Kq9c=w|eLQB*DepSZLp`SsrpSet?CyB6pnKj@VG^PyR>X-N z`0yBuS6)p`)f8BYnbwCfcE}}Gx9k}8Kj-2^rC--D$Gh|%$uLTd&BMvE#%AkXS{sLW zwvx9;*u8_;cId$gzjoL3Ap`Y$kqoP*0YGlNbQmCKr`E!#rOcFhj^?99uds;v^{4F5 zMOa_w;@o@AW?F%wTSn2nDV^?j4{+7=Yg{@}JxQzB8_Lp>yCoO3m z2r#t1uiL=IU=dIb(zfc|?O9Jdy(_qufKICyxRcN(aoU~8ul*y`>lMV*h`hmLf-oP2 zH*=)Y5HU74U*TV;rEoNKt+`v*zq~P5@yl5+`3lh-G*BKEXQKe(ssbP ziC#_Y_*Z6S=#Zp5chde8=&L~Q&8@l}OahLq8w?AY&Cn|(fP7dEZWEcTK(*Hz{x@f_ zoadBMA^N7xN6b$o`d#V&uj|`K0N0#tNA_P`Q?$~_(Jc5~!d4eT`kHa=qwk%5UDhS# zqkpY53Vjl=WE=re9B^Owe$?ZK{#FrI5DQ8Qlwpy@xaX{Z_Y!y&yIxzHSRv-v<@(R( zTps-%uCpX|GW%C10WMh(daG1v43mzDQCKpS>6Xoj`Fxp0rclM}Qo{nT;}>hQbE3Wq zoDSeLX&m7ukL97>Tnb^KbL#2@ARhR@xw7|*ZPB-EORyaUOM}h!KP6vwS>+a-g9(7L zvGsz1=wF!&3Yy^73Z*TB&F&M;kJh2os8#Y@1~xqexVevm`;5cGS)(oCiU%LyRwg&2 z#N_vFL!~|4t}k}addQ<*OA+FAz9cZODfKP*ShvCiVN8#N&iUh@a^vd++*S994)_-q zr4GlZv&&k0*B?q~)YA4hH*q=vp&Eyiv~17w*q6a-fJeLE4cdgRxHKm+wUInpybcro zZHJa`;cqfGls2Nuw^jAXJ9m!LF^qgL^O~2Taih{T4nRir!;##8$B5K#+Y-$5vUP}SUhChL?jgOaH2WOhhmKH&$p3-EUZ1`6=K6Jz0rfWz# zX1~95&rjS$UiCkSKii|K(&S}%LzU-^`*y587G0@ieI9uNfGDV$JlZ~^$>|5A$|_+6 zwN3g1Jh>JS>j2B*BE1m;OC|`5B$4R=;ju5#zgG|A*bREI_b9`di7qfS_a}tG=6i4TL+qg(8 zp~uv!Y|oKtv*caRi$&O_9~b;mqT#DCxxz^;yV?+{Rc-Rk7=!4fVJn@?jOO3DU?%7C zxcI9SgDyp32lx*wLtFjkA8V^J2#N|VkLlZ`J9YuK5B z*WEo_YRJFfo0y5S%YZ1Z;QG54h4AQL{AVh4wK`s|IsJ@tki!SOcI=XL_07)!V+!8WG@KvHNZ}s(k%~TVdIx5a@53asNY~eU 
z9%~O|BFn@gG>ALGy)^n&x|}0!Xj#4>lDa|*gqUPOC^suaz=^|^Z7hBfL|9zmw+EKF z@-I43Bbf1DdgW<1>bq6emvygJ^@Um)3G;nE;O7-O&eyaUa!zHR^BY=Si9`Rimc@A$ z5~txq$;(^){Q}t-QfAj*=XdYHLRCreNZHHcPk{mxju(XehSmJ9%#3eFfxJAl1V&}S znO47K)ww^)QsJMkbve7A&+h9P_I9OMpY~R7ESac@vhX9NT`{9P;lp$jZfi{;ij6Ude_F zy$+=tn}K?bC#@#^1K~SQ?#zL%>kotQC5xt5`a1qs&s^%Q>Nh0-777Xf;5N=%G&;s1 z2S^Tein0I8=v^~;{oVG%TzFe;PJ4WCT;L?LxH4VKne^3l_yZfbcOhX?_j_;P(vQTc zJWmtJ+XvzkQ=ktEtj&4YK6?orTyhD_`}bovrx@nO^b9Qy)@2|h(t2#HN6_*s8XX#` zeLWl$62X+;yRH}}i)6TgUsjrVoj8Y0thl%CRRuWp9>B9f5UmFB44i{&jSzLD1XSKM zZ08+B-0?C@yB9Jse#LtKEVY^Iem<2&^p~>Q!O0;pjU;c-<>ykXoQ!kYG=_+% zAs|dvG(_Xyj>WW@Z};%)GPy50(!6#qc_g?w1U%e(;-dfS0W;zKA#E@PNV0{(uXuf# zwR74*q|wKa^}S|`vg!iq>qRYg8PDfQu(A^+a#gcC$Er~}=9XI7O#BQ^_zUI$>N{Uy zrU09=MC>m~nOp%?^}S#IUW&$qh)d7Fl4;kHr4O2-%c$euH*xvTtCB#`>9A2g*Kka=ku`s$`CAt~Ano zHr0GTX;2j{KjNPZiHILtMnNe^I?(f1(#a%uK}WY4W`_)+z-;WB4I%OX4;(kH#U<6dBw?4PnCQ5ntpFOH>`(B+$4V26E z52|c7qlkDGa>LA_QpQA&oPGlKm{7DFjznEt2_9l1d_8kHPYN4{s#H{e{2M|SMN>{;WwH%dp!K*bpE;J{1 zVSlqY6yS6b(pW#VY<97DLC7J&`1#C3kxOZuY*1khdGa|pn!a?#CZ-uQ_Vl#a@ShRP zG(X&c!C!x+k~RCZx=iEtSy8BFjxn;&-0fQti&^y{&#T8@5<+aU67sidfMQZ+11dv=`V#=hHH#x=qXz8R)_jRxU95a*6x<=>WJNrR*<7p%>#wDzsw9CvarH}n*ieEK_A%*I<;Duld^vphuf z>S@V?Ue!=9vexM_v+UKI?2@hoXU)c6_KfbQknX*Ca7?#jL?^Om&v-of3ArBqgfrM& z5R|ja^|K@Vc3YXwU*$DRFUO^(@u1pllamz3U(_BXB}q@l@8x*p`h2r5L#+Na@PK3t z86kb8Mz!PY@oIbFCj+tUYp^l2RU_gs-zl){IPyUaT7@s~D}4Nsm3skcx5eU-)8;YY zjPq-+=-d=qfmCSDYOBol3B9`uWAg;&BcW3%845}wvb^*s?O>T#DKhTJl#Pc&89wFv zdXr(iT_q?E4l^e#O;-;oN*ZBV%*Yd0eFf&PFmyKpF1_zb*%wL9PKFJ)n5Z6r*^*1vc% zFt+3!R`~mrn!LnFHgkyhPrSO&jw73DXY>R1w7yWi+YKKy5*DwwD#=sOf-e1l$#Ob!I}j@IW8^iF9h;dk z-g_>r+dPaZ-MXe8m_Fc}q^$SX_1g0)k@Ujfy)q_*xuT+Le7Hht&9Y^9q(!pd15|II zVRPxw3O*9}Wu|M{_HVH-OBlba7Gb!NMHKIq4hy@RV+>!$(pgXc%}S;R@H-LX><*(ZlinooCw|ekmVmeyR)a~ zQtI^E&)RA2wUQrodPsE0Qu4!bg;e4;)KUKV@8bozE5*|j zw^rMtWg)TcohDb&3buuXM;e5I#RhGShr-V9-zv#E!BF()PpELd6 zGiycQwWz1p2^@dp|Flc`4@tNV{#4z6GuYZAID%*xxOc}X$A1PJr5CElJfB+rVT$qt z3Mt2WS_kz`f;Rpil(D>_K 
z%37Dt{s|y!2nYFt`G|&<-<-vUAHFyiq026!KK7ah`cbX=nb<4s&flgv`L7G^J6W?X zup|*H$iuS?e8^&e!W3B`8qE3!Xro=gG+F3`U0Z(uJEnFwiUccBXokNGnT#>w1%bAl zNSY+-XYCvaxc$yGv&i8=OYE3qxzKPHh1~(X<{aLM_s}KL#5w_q{#PL^hhdE|6*G)T zV0LS^)eIpCZy)R4+dJPbwTwmE^t*8dA;IM~Qsp)Ak1;Gz@=eX6ARv`{=wF$Lm7<(Y3%Uk2nYEfSdgI~k>1}z`&UK+C@f|ffnWF@9LIlUcyatM)0mVN z1-GP>Hr5jA_^(a$Hy{iac_GY_Hr3QLwgjy?{%Yqv+f6N)g-z0kKN;#UV>*>^4Xu%P zC)1@}xqz28=7D^RG>2;N>E>3ZL$~!_RT)QlU8A#|#V567Q(Fnr?vys~%(j`LXSU7% zjHOPP8mxxrX6>*~JbO?609fpn$tTszWiw?WydO?5*Y{ui@(WY!ju1%VO!xxM zzxxc^r#CY1$+}& zVKxdRX2uzHg|1lns>l-=1@XRM_8+}M1c|u7wCCeW1FrMq@c3}RUICBd2Kw(mW4Tj) z8w@-Pe(34D*S;h3%8L1posMld4fm5L&p?-`Ouwk1>@S3kb*M?}joycoN84t?<2}z1 zb}GnC0$z2wA-lQOcDZPydBfsP=Ji_6_DthTa`v8=1`_f!=$H}(1`A$bVZe_pyN2&o z8su{SeuAz}eLyWhig>(UR51ee(+bxqcUj^jd-$Oq=354d-Rrw(;+Ps8Ht*i9@EXea zu(^no2uO9V(&TBWZdvO{rOx6&;YC?DU*cdadcXnvzIo81`oJ{kr?8k34TQU$sq8bF zT-61{y}WAT0}r!0yB}h5KT3Y_p!s{(R8vYQcl34E26_jJE?=d#9 zmG?~BYJ-l4`M@hRPO`b#WyJ)-=f_QvYWZ^EV-yzoj-IE&4CJxNmC8Ixxi)5Ox$82I zA&Z5P#DQG9gB(+Z^iH&Yn1-t9O!lGONs9&LgJVuKk5z`xvhKK_TBL;)*Ii)Ft z-v2aPI83^n5$p8N{l@r4x4x6V?Zxo9OJHj&Rl%F^J7=B|v=yvs((5T=n`-xrVUhgX z@Wt4@WQ|Ai*vmQeFjb6-SD3K}wCrbD%FCB+INu~;SxrP#e1D3oe^g;D^yajUotN6` z;oFNIO|ZeRb39>}>h#@~@NRZ+|H6OrMh@*0=_5yy@a3IneIvO!M>b1Xl6!Dtiz z8Hi7QLo^)I(5@aj`P0Do+K)dK2o|M3{SNxAgY3Z!-BC`MekmMMm4Ls9x6@QgcH5}xo!LC#hfve-dO>ZQ z`f!tkbXpwAK54DWEypP-#hg8i|_qXdh)a(3ROF?)UP` zeT+z@Ha26EBjT39?ge_UCgI&Ch@hxDOg9tCN!8cpS6QqSj)o#=&Dm)&|$Yr5ET77+-on67dZ)(z$^u73O+&}&fB0>LyzzmJDf z(#I3;4U|Cqs;Z2P`t%+z#wJ)A|+VM{}*R`Wry? 
zW;y>jcf$Lhm*{09n{%3Sqk+4g!*-n{AYONWF?cDFWv`~i8?)(C4#^o!K5^S0Ptz3m zfZXRnC1_#!8`YGrtg$+q&~S{aR1G~QESh)F7vdB)hzrj*?kCGod%-GA zPPT4l@_46TeWgOXk8V?ap~keBqSUKqcWl@3ma=?5e$cJp>sm@#a}bBtAz>}>U}~Qx zzO3S>AjCb8=Sf*rBPCkHXvkNUrDI0v+|h_ zyZNg5ET9*q!Rvy??}*!-zys3aLEYP%_p&55_kYJp1(gh>PouCJpgk z>bwM@Dh{-2AC<3~>2;-9{fro!@VV7+r?%E4vYZ+n{ns7({cf@iAgrmX+2WY}0hyhF zLB+-Og{Jp|$>O^1tumO(Ei2ciQlz=iyW|mi8+IE?6>*>R&KWcZ=yLx92P?l+9I|2E zoLFp^UW(CF*($SjnG_%Gl8Okb)^f&wjEl-eya9%`9X$$( zZMfcN9aajSPM=*z@Iy_<*nS3_@Mpy8P(=AT`|LZqWlFjCf}8Ax1Zjn|7oZyVXuX8s zsIh5<5jW`$A84v(4F|rwb*2oxZNaUD0{RyZLXT$3Dtbw;Y@f0fWKuTF>`o+k!;PIh z?bl9AC{+8zCbmQ5nT2ZXi6)tv3m4iW=dXfkud5O!wjEQywoSg?jH)-Sv+Z9QzuzN~ z0V>5r@@U1>G00u$2ek-`e5aoWPAd&I>Bkt~F+TQ~)dgd+C~61)l{sM+#$lw#-{|;g zTp6bM;#ZW*cBh~Vmy_~FyERj~ymbP-!<=)Lbb`)M_o7$MV%u6|**ywe3+1+^E0!%4 zDZOFvWutPt6Q05euOU@nV0iwT%g9XRy&wD5>`WO{{JNSXhF%&HZ+4BVFJiqs&e=T->HJF!&Z?CH~iCo1LqfPI=)>)SOo~23~ zF__bWRwp~bwKKWqHg!4~`UY*kcvi-K5#EKny6??;{Ebji1t?4Li}>=zcL)oSbjt|86F>2xI0eG zWe3Ft>3ku#)BEgDHlcj5u`IT3@`n(m)!vtGR!M-2n<&>YB_nIBCGjyC3Bq_|zJ&?G zgHGCpYp*o3h5h@W^m#`6eK4Fo!Q5OPkH#nB*!iROy}lpa-2EHVn_HE2HLj{>C|u4Q z8=#*gG7lY_d7wJebci&^FLt>YWVd%@#;2E}m=qo5_-k}1x&~CP>*I7YA0*+ z0)8`oo8+CD6}x5HpyylhK)kWxSCK;zx@QaN*j9C8hU#*6Cn|0Rk~p9rU>NS6^yH>@FPEi6uQ0@-PTzAJ!Po%lR zm|NUJ@ZIEfE^l)g6k@#HFK)izs1Ce+tiQ#XAI5>TLid$RM~VV;CDl^Tnz2M^NVPDQ zK^1In(slq_Bt?ajH-)iu@jASRpyl&P6~{we+?DcGq_Fwl);~wMQa&12vf;O3akNlb zDO(c0+i8p@^~QWOGb|TWbt+_%!J3u|nT;KK5oti9Lh$wyBPrZ`VRIe%k4mr?YhWD{ zBmv~wgF?5qhmxFb(d`T32;MCEvM_p;ogbO!qZ@(DI3$G^Y9oFXC6~GS#S{VTMY zF2IneU8X6^Qz?hwC)L1n!8+KtsEQs$siDA@X!*vZbqo)1SQs2K3MxmPHAWsBPV3p# z2R4nK+4`gHqoSQq2jQ=!$wQ56MVno*D3pXh-y8_z{B@zlPLCwmlop_OvLQPrY|kdC z-kYIQaRRWT#Nyw(FHkeB7$5-v)Az;fkJ2pqnLE0OknuQ$O+37VALENKb39+6_8tF; z2e#F?27l?P6c;@yRPqo_wj(U&I+pqx5?4W^#ftvV6nxDyM``xfWd#9?!j#4^JqgpI zT|H%3lbaWTxCmUliXq~O-cOtgt6d7hEoNr)#5FOLkgw<2_I&l=?HfJd9@wCI;9wpBqEe%EqFpCgQV4D!6r#VI=F7A+cKuz3ylL# ze?;Lcly%4Jl)=-348X3k|Fk9n5302J4(r9`v#4Ql1{x+MM0&#VMCJeecSv9x%ygfu 
z3XGW(=*bgd?tqa8n3MPyxR)eVw@g*1;hPrF(_Oc-&%crqmK0P6BN~b`H;wDTuf<{l z23>c&KbvC7naAJ}nwUlhH;ZWtCdJL2crnUf*ri9sP{u}EO6z`=Tv%5P58-7H z{{%9PlpjsM-X&R}sR~R%1XvXhRhC=2z4on7tE_d)jg!0QkkU5)1~9|^mC+?SSd8M- zk)G+U&^11wHLmhcw(ZNn>>6E$er31N1udelG&s-JQDL;_? zZl$&l3YD0M)*n+J3!|l3;Hv-#xdYKB-IVu*zqN8f=m0>xvzV>_%5=h}Y^|5-2jt%( zOq}Y(#~=M28dsPl-oeVXdpF(ebmOOCs%f6N)AhzL6oRQ++Ezc*4)tm>+-^1!JaP4t zGKey(PS34h?>T(LFE5oz_pL9T113*yXu?^jWUDveKRe~ETf};DO`N-d_o}4PO=%EC zgnofYtOP(8m_O9nOa*0mje%J}nzQm^OyuG~qAd85QHBN&1?MB75+5NF8P-n>F12(x z4FqFAYdc${j8Kk^^-^USfzK#Ui_Z1tunFge{;on&JRl zDj#?qHaQOYPle2ZGVn0G)^sz>cF^G41A5Mzbu5)grEn@=ntq5`n-@z2Od(gJnYA9| zv9qR1skHHyL*|@6uBfH#b977ge|Bxjr3YUzQJZ{*!?sWlx3+Edd7w@b=N7c65zxdc z1nH0JMm`#kZo5JcwxHf@;8?YbzQC3w56Q4!V7GaC+S5vWavYq)eKP3jQ$WFdBhBgY z$b{|wgzpTEaUVfgSKz&Nhx(d^q_rXccTIH-qAP^KG9>0kHIZCbzo^G#dD<7>!V=P= zh?Sd-b=I^wO0}q*xEH2s`D#8Cu4X8RK+Envap3+6H??XP{sWI>d^k4E&v4Dm=X-|A z4$y3$Yp%|ZWQ;LAdqkJ`czaHCy7tMz5I~ru|K2cwe*ip;)^9LOvWvUU$1=>DS)Iqf z%*OdVCc$8H-?**#5x(_VXSw9x-x5e>cIRMa^I4xZ8<}cQTN9f7y4|TCC!Y6Jg<{#E zP=_{DeZ|w4@KaNAk0mkXBPSp^GsaSMGXK}{qBXG<;3k8|Bk3BNge7H4A)c{L16}7> zxGD1A6)E$ZzhgfL(dkGo*b<4@Dpcu_04a3fNKA#9PN3usZXycMK@*&0LrUz+l`k=YVxu+PV+7LelNSt zpnbW0v@diKeW9RM} zfQ0a4=UEqpe@X0?iFh(@Az^X#KFxUH2eKB?CvL8}9kN%_4 zszd!)mvDPQ86w(VO6k{G#*FyQ((&T!B#M~h;yMfGFraf{&=j-8T5E^EmFTL7o3?9E zn#*h{DuM&Avb&M)!h)?7Axc0gyo3MHi%>-;? 
z5?@0$l+7|aely)L6sLAgs{0rz>J{J0V$Ie+H%#m#9O~@$@X~wks-+ZM@Urvx+#%Ce zbg{-R1oaiaBDr|kqU_O(yTTxv@DN{to||;ymNhv#rF5d3fjeS|)_)oIDPUXD${08E`RTN2r@5GNuU_lIGQt>PkQ>J;uBhE){gmc(+4BaNm%$)^ZRJFDw!Hz|Z@og$ zOZ;TfLZbG+6@k|*f=tH%J_cosun(-Ggrqfw5bJ(huhZbOf)?<2i4=#C2%#tJkRI98 z=+A3wAbQwY;++ohF(Eb>2Cw`G6YM^9ToF^vp>md)eNdBg8`_#3CZ0vpz4!XmVV4J& zTgV@nb!LVWNpZ1i5~xYBcH;EOu#RKvklFR&DDx{vtz*=Rx>IJ$X4_ui>}y9dlu)Lzhpf^O-kDvu;bRTn&Spb>6)mL+z& zNk(ZKJL9(F=<4>Jj*T`4=h{7JOvm<1NkY3TL%Ca|eYX?v#Rv zTS0!o%WKC4d5+g2K93;w^p@Q-5L9KgRJj0i*kxxt50?T&sf{e!BGKWx)G7MT|06lS zo;YUf>PanHh@~tS@_(q!`|4NNao`P{F##5JS^3O$(l4n`(J4yBDk!kvP6`(TZ{8FX z((s_1WAw+s8^Ch9SiLTWP8D>#x{dOy`>%^EJ;g(zlg0arG}SThr2FBADl~#Gz`3JH z@~72hpmd=cE|t;>+u5wn_$oI3_Jx)sxxFQlzITBF>}(j^cI1bz7}`!zH@7Hkz<;a% z@*7(~h-r1rit~P1OWwEnk6KE|4hch;UL@iPQRp|2MZC z>VUU{M%A@7!(M>W1U9z7x+%!5NAv?}Bz0j5ojSdbt&FTn{T6@$Fi=yTE}oTw&xl_Y zL`sllc};}8WqlM|>5>@08=ednzURUsr1y_u>FuV$7EOzJ)NZUz3=wVMi{?gQme1Yc~G!NmbOl62Q zB%}wrv7&<#1JIQ|j5lWwC5pVIJ&h7lN_o(O5>xp6$4&motfw@9?w9*9jM z+?an}_cg!7C8@5CX?|c-#Hr6$mEIp3y^tDtD%+XED8w=^e^nq%SjCb z&T)-sVL`}^Ixbh1QVP_T$=ofX;NG&H>F0I@k=yi!&C`DzSjX_i#!Dtsa$B+S;6nEl zHh`z^X{KlHCE%cbkoWFi85|VN;!}k=8@fG?hn`lCN7qG8nkprS$gN6$tdw)zy3eiF zrU@`A4eWTmw%dMA5|MIhX9!x5)b5Yg{<6}juK5HbcGNfoX6vlvMXlwre`OAIPWS3N zw>0L*T;LJ?!A))%7lu()C3^6K@rKFE7KZHpP7Ghj5=1ic(Vm21@qLcfo6|pi@}y?O z{&Pf7w^;m078ws;$T;#HS{E)Z+hPRu{#4g#Zl3{RMMtd%A1@HnA|enz9s3dR_QLZ& za8A;Dui*gF8gSHFtp6|IVBJZ)bsi8t1L_)S&kWVCFTzdSQn57?(VCSpbb-Q9nOpmO zI3LIwfo;OX@qcAX8c6Y=gj%!=5~rGVMA03&sGR;E9>@|_olu6$#7OI!Q8?rN_9?gi z#Iy3sk%UjbMxvq|*C>^^bzqR&r(cF{y5}+vK5dS3`AuGQTkpx>L#;fOi;$pZg6LA= z6qlif#(r>7V!h4_4Q_v zSWRAeS0YjJ1c15Zt6+RcMN_=i(F_)w%#Q+0kb8W{Oz&LzeBV&Wan55NeoXywE@0R)b(SFPk2=1o}4`+L7a0{BV;t+SxDo^fhAh-loR`U_^|t3`+!Dve|n z^)viPJjI|H?!Z1f%FpFpBmOiu z{t6xNF_X^^C$D4n@>O3>ua74~a%YH6aClj5eB$Mt*_HIR zd56zphxmK{qwN~Ac4J*bH!am#dnUj2R|brT_qi*9Wd3VA)K42e;Bqh)wi{KF+74cP zZqWO1$l9&6=^1F;S*(n4sfX73I?UqCr=jvyZLgN(Z^p)7S7~N`b!PGtVek;~F;4za zshmk={1O;)#y#Rf!zE#?Tw#m$^tdDJafR6zr9{%5vT5u&m%Ecn0TpqJcj3 
z235>~|HcpL2sgXc{aYaz#3P0In~cJ&RoF*&Pv+1c3Ch9?Rxl3992vvK&uAQ-KDhTs zq2@^Anc5RvWMr&ocl7!SN~_adF~Q{@(j`M~nssxPtF}&Y7B{#UXImMYk%gb|{L&Mw znW;{+f6ROt!((7$7ctK_p8YEm{eg7qZ%AH7Tp?P=tFT3SZ5nB7Q1txkh6RyKwBaPU z38l@xQJ(~1ccM1bNjP*+IJG&xOGgW57G34+a>iWGC@QD0vZs1bXfhc6Zi`rwUEBBs@XJFjCZGrhJ#w zVH1J(YABB51q4bZ_K{;JS<~}-@$F9>Cj@rxnrV}5)@W_+IUe=WP-sQg%PaNRKXt9@ z{)~;A<@T9^hMLnpQ~8*k&}za+5l3cS;pISUUSY$G0Qm;TK}wJnT`}@v!^aoloOK z^PS!k@3h7ihpkdNp;KCdk(2scGm4yylC;L=$`n(ao1B`T1}=VI>AM!aVQoDrCYnjX zEL7w4+~RP+XzjbWvR-ZKvvVj)X#N&zhO7!>tBP1_7I~CAF#vB zQ@hja(`y^UipajAWGC2PRr0;=f~7H&ih>!zbV#Yrrp*~g(HH+Q=@uS*Nf}@t5=zT~ z1(Z6!jMH z{DxQ-H>jIZ`Q2SDCpbE(O*fyDjh<8v9rBRI5#mQ|5t9^ z6JW0-#bf?-GyiJHeNc5EZ^pF$*3fR9KG|)*LHC*7*&wNHGTnnGvm`l3Ec{<@s#iZA zKpJTfc=1}D#<-o{8d{0#XIB%}&+voH<*t9zw;>;o6u9|&d%C2wTPf~e>hCBGE-u9I zPR*(5=bnMOe>?wOg_HzPAB6Kv=ny5+FEQB7DsYngj^E7jlm7mT9paG@NPddlW9C1# z>lc|9^32JGlbx)+dHf-CT8&!s$Rr=5_;N>=5s8i^{VOv}p_7-WF=r>wu{ncgXQ=lk zDP1U4(SxG5i2x=oe6VJ-{+iv?o7+c*pK4=Xny(T*S@{I%)yGBvoQI44$DD~ zw`qfJNH%8Qr=BvfLPq!nD@@l02Ti!VF0Q@ZbQT@=WQQ6%^r~6-u*k{c^V-8LToRl? 
zO_3-{>fk)bK{NMNk;#Hn?z!Usff-MHyiORdz`sPWxCUPeHcS5*-e)Cy_(i@GWBhLu zHKoIa!>UVm2s3mhd{JSSUrqQDW=sC9@4zS8XQm205;L#r6>fIsWutSN$*X?H!GN0B zE}aSZ_js?D3{|@@x28a-@=82 zz<;6><7_9-nX67&W#>15-ifu*wbCo_W;nr#O*E`n z5kAOewtVBURjXTPPCH`Q=y^>FtY^*QpO3S9hd!G@XHG@-Y(0x^Y_5|fKk_RyI`*zr z6AxsDIYzBQOa;o&$2J(u zu_2>bQ@uG91m1(ELcV#Jp!zKOY0bf9n?s_Yin;mcr72k;2v>oAgWvAHEZ0v@v@h%(KpsIDG2fbn3OaXBoT{Q!V?+z z;)&Mql7JzaKV@|}S$JY1!-%2rMwUV~tiHP{cligj`S3p79U4WLuUoNQ#5S>`qN-)H z?=zS}N_)@Vq-@nPm+8u?osNXuYFF)50;Zz8?6zc+5K#utu@0PA4UDcGWmdV0t{ljD z9`hdi*`otZrJKjJlA*8%vK6iesi_tn+IN#-xeIrZpSDQ;IGpj8!2*Pj?M~Xq3yF`J z8!VPUxafL47?pgRb7H>%X3NBG3+3~I^T1IZB+nrhEegXpitVu(>HfUO$}5d| z7!(K}(H~$aahr*m+U53^(LTf$q@@CA6Pp+_hVPZ7>W$Tzzg?lRbB#yMptxAz%4=5okIB- zlFBNX;d%hFCxW(jvKQyP&IY92}{Hru{+*U~93U^S0`Z(0is zseXLy$9vB4B-Llam4i7V_5IkPPalWxR!{-9u8ma4j}M#f^@_ies%5QGGg%2%^oM1m>A;pu7 z1sC|fp`ur}D$KQRfM8vZ*pQaGpP;+D$+r=nBu){ZojytqD`D{t02s6SH~E4>!{2t0 zjv4!WrGw!i($&>jzddZ<2dhi@ekYt{6n_@l!HE40n4O5sKiQod%yRxMYA47263;&H zqb<_k?}+)g(RS6t`KOkDhB-y9?qz<48BStxWA_jE|2S~rDf0h_diOx4+y8&OZg;mz zQAsGQJDrJ2a<;p>cS&M*DQ6avkdX6XdsUKhS|x`Zw!4JnFmjmla$J&@hJ_d#*34lu zo1O3P)#s1j@1Opuv0m5px~}K*@q8SfkMC!;huUcDjgMF-rS4>EVpP5FelmSYchdWf z8*|ABEU`bZoIgR9jbUk#?1 zXX&tYI?NOV5VG}@w2pS8!YmIS=sVZ1{H?QJj8EKTL^xsZB^Th;jFslBjC7h8%Q zWtz>$Yd!O{qV*uQ2U)8RN=;Ca5@1294!gw?U=|qp3QOzF82dBFnrNwcUxyQH{mcGm zv}FXwB$mKsvG~0$RQuw#nYA76Jwd_wPneB+H*V}b?reL8D)lr$*# zKBlyp)653`7S-g|KB>#nsK>znQO`Mjy&#+N*ZNqnHN_}*UC0!E3Ih1N~tda(UNgolmEkCpKp z|EW7=;mYYdnGRfH`fZ2dmZX@|<^dW$CVyNxZ?f&v=Jy4$#KmWlxFGp4&+w_W&fvR2 z$c?t4!L8Ropfo&)>U$traI+x#j=bk1>&M+tQ+TGy`&y04E4$KP_37L0&pxF8Id*?$ zCbp^1k|)0}(`ip@^muKC)Zq3yJ}O2kWAz@zc$RQ-jpgLWV4z>6zN4@v$mU_c@$77u zPfc9KX*K9{Oc7lN*~RJRL~m$4sC^w8J>gp7T%xXoT4^oQKZ=oTN@Dv4Sr0a ziI~4t)YQ6LW5Y6p(pXcb6u$X^SuN~^jii3UAwR&XxpI@Os~e~<#`-K>&-!u70pwS#Jt}9ZTO*#<=nAzc1x#qp>t3a-`}@_F&h}ZHa;m zY4jm#4Vhp(RQhoNKOhQ;BJXf<%X-Z;2|N`CeRUl5&dE1|W^p1p|F2ES_u06QD3n9I zWOvhStYfUn4()$4fI({*NG|n~f?-U4pF`|^i8zuaN0;7eB#ex+idOjUm0zOI7d7D8 
zkqhH&G|gH93xhhW+kXp0)~PJ2PN$nyOa`lV&LPx<4`!GPQQ5m~21-pVd1Z?vYnd&X z?+@ycWOHnK<+-<$c0pl5yDmJgI(tHMYKM(we0W+0?RTi-Nc&CzJ^g`J8xPVs{Qd#8 zo2YSm9)71ead*bEVuL@Pl_+oOt?$x`-W$ml6W)^c2lK#4{ez}fM&m~X>!RP%fb~t-4>SIi}(&LZSDRvXtdiip-Fc<&g8YMM(|GRId&cK_)1U?wD?(P=y zebb{?^}fX<081T}m-NX)TLXNKzfX0!E8hN*vgHj!X6K)uUYsj9t* zbuT=v>5EBB^%ZXMe@3Gc&_S8&JQ0G!Gsy$wLs~FI3CcnoK`wV4>nd?QSJG23j%{b) z$OV82bQelDI{LIIxp-3vJN#fKvCBV77D4S_mf2W#4|YulTrO-QOx!kVqs_DwY0;i3 zYg^L zRUAyhF4!4^_rd0#|DwM(_u2Qzd1yCJWPN-R_~&oY~3?Jf1&bak(1Qa$9{J zOh0}J!%l?8RN5)o>$*NbXy4*}IabuPVr-~9Z*mjtm#*e}q>p!rTq_PovzJ)Fh}H^A zi#g)6f?P~H;QV)$l2QWYXgA0Nw(_iraUaj$S_Naf1fha9k2ma51vG&GWG1C+?q$>g zOUBlPlRxf4qcIhy_s9TdyVT@mxV5~~r^aGB%tYLvr&I*~m~Xs|n6_`b$6SNA|F{3+S8D^TkztReRd9QKQrzfjJd)CX6@BidV+R8~11CW0q!s}JSz{4b zk%3$&W`sP#oW>WQg+G>_j{EUfG;)-wC_mVxWiq#MNpH4dAVBicn>8FuhJ4_7D=T=3 zl@Cy6w)9oKl97>2mQ*qapcnlF1x|8zP~Xi#Zvv;GhDvUz2k;S!Io+{HqHyu7_N`nj z);vicw&RNJV4&!o8{E>(Grq3AC2B|-WXZ`pneNg5NH2afh$?rM;ANlaxCy?8vs9I* za*>-|JUyO^KjnFGrS~>7vW;Z@!(T&;nbBK9B;p#pTBh0k76{Ed)Km9JMq_D#>tJ#s ze)cPsgxqPpQ8+%GPLp1Bi|FmZ$y8myCVGlue&zj`l0VG6KX=^T>dB5V;_}WJ<%i+p z80K;!vU0A#xU*s4cg-F+7OY`~ghRNw<{H1${zaWRKHR^q6>R)K*K0DKd z%f7z!bQ8H(6&Q4=0w@fQ17@lHQpA)&a5AaYPo?~3(_P|lw&C`}^lu=`dnjkP2EKOe zNhpdGJj$PzK{5rZ5h00`tU2DR#BZ zn3Emg%Ju5vNP~E)0%zHKxnd0coA&XT6k6BV0uwem_VF|dw8Y3V<_OU-!36S$V1+@a zOt!5`Jlm*(WCu7IUJRY&Cir!ZW*=Iwm7{*I*SK_qgV|VESe5S-$SN-xrVNQH*p`+0 zk#65DotL2bogEb5&f)wHJrDaKj>iBp8jj2wP?l>u+Q}B++$HNaK)C?FQ`xdhzP;Z< zWVu9RvI_j+OpN!}%Io#D5-sF>ED$U(3B~9$`F7(@0@{j@D=f0^V3bXrFb!)g^UAQ-cLm`bob+r4ZFtsrjBydD{S=Ld4 zLr*~(;J#IL`~s&J&2J(jF3S5OGiXcHPw=|rPL-|F7@-X#Bnf-0mXL45eU$h%fXG6w zFyBTF^3Ra1!Etv{R{jC5%JtgC6(|@Bh@8YcnX?|bn#lhZ3Y?3VZ0_B8)nt(7J}js^q2c$WtIwiz zlbda&*mK{RaE;?$BAj`~iWU zp&2awS-W#W29`q0_dCxca`qN`_`h`@tXg%{C)UjKqEL{6bRbR{gFb*b_z$X`h=u>T z+7p}JHu&gQf(E}DA`2rc*`VB+tqVs%Rk32hnlbOY)UYl!ZFz=Ii-!d~XINo6oYzS2 zY)rt`!7#;Gm1*aM=|$1*xw;m7z0H5?>}!NvxmF}oya4sXd9r!Y=2bt3(*3J7YeKid z%5x5|=$F11hEh__mv*O?1oJLUXsrJ|MT#r0On^L_z|lP}3p@a!OF**>1* 
zr%XHxvjVp^cfK}-(HJN~C?{472vRMf?)ShZnlJa{dU-`>50So>rFF8?UgMwGIX}rD zF7651pWPgowRVYrHvzaP>V6-QZ-u1bR!Fu^%s3pJ>}4xE54Pi2vz@*97dhNClSF{y zbSA)irFF?|pl}320S{m**tZ#|iXTCx_JhWI`hCH?POwJR7Hj#gTJqS){A(RPZ1V`# zWX8TLX^u-X3z_KPawNG%{0Na!xeaS3{4L1jT4Y4Fc0Rw zrlXgSG#Po4*XEifXCp~k{v238#w}f}f?qY3Z91RQ^-t%tZv#x}Q5Q6Axf3EU!*}-h zplhD=_`;cTz#gE=JM$^ufRcD)vD~mre9h!V?|EyE3{*#g=kI=;lY_%lOoDsGvOKR2 z&&ndda!*_-+l`)fT3YD*_d!@_SE&}0&==hSm$yodIXAQDFwKD7cWVAf zE*<~D;(6R$`phk0iXwIDg~{Qt{5^zAT4G*7+KkS6zWioeO4k&*XYtNBfY~t9hCA5V z(YxA%TLl8U80(LwiuWAiXV%|~kHA0YswbXwIQc15Gb#*}n?Bw@>C7h?hw_ykiN751 zuBaw&PnC7rTAiAUaXG7C24-+YCQ<-Rt-Q_C2t>vVT=P27-+++W-2{7aW1Du61zOUB8K$Qo7wP< z)&@QDOp|+-0f|y1Cw+b@-XEx}m4VVZjozB$_%*@y^^aJ$UR-&=NoMiVODHgI4f%pM zuV^f?R{uHBk;}G(?e@iwCH8SIhg1%s=Gqwo4=O9Z*G&IPkI)+idce6ObGbkENw}t$ zymra(uOwkyISKIlNLx=+E;LeGEso9J*S$0nA&a(?c>c?nUAn+91#CGtsh^O@$@daE zSLW;SwusNo-JswX4bDvzFTS+8yPicWIT?nv5Bp<&ZtL?Ntp~TbgBa7LSVdFDzkD;6 z!%(Em{pz{*Lxxqe>2YVm?%ImCSwyhOrM}3A6U|I&iYW{g;X8SAzh4#X(iJ zms7B8#B{b_cf;)s>tAj%^~E+Wf6$6s1GLOz+#F}2eO2n?^(`DhsT_m<>7uZB8r+2x zkS(zg^Tk}azbUN{7y0C9+l584BGq~d7WnXDx7ps6;|MQo8>y5rfX3y{ez&AT#VD)E=URG~=qIC@~nzBRhTBeQW+>aV8iVmH4t$j;^ z7e(sn?_YwPzie*>D*q=@ue;>dV&6-aVU;`_xft3V^KAN{cNhQNoJ%0)c>f)FWeWgb z##(&b2I)KOTioVXQU*bl|F(TQN_T~}{qS_?cf9Z-{i244;YfA4jG~r8VY^fqN}MHC zdO(r`{A^ob7j2Iafhn|r#plmknZna=jk|TJhQi6ya>%Vi;+zGWV2MsHcOHT-|!7T=sDdR9X9u%Rc{5G=7p;&TC_vK2?F5Ntrx6Dk{KD*giY2AgXBig+i^)hVW?`u}QO#6L@z=!yF z9NJpCHIWV_SK%XAuWP9h64c*w8w#F=+z~2bqrpBFAS;;3S?YLVM*_rdy4vKkw3=JH%7u^QWRU=uP8q@7y-n|KP9aPW#rMD2};Y zV@+4%H=N+**#8Qk3JpHrRGX+v`xn0$IArz4vx;>u)``i6Rqh zU`GzJQc@Q2c(1hi{fnT3x>&N3Tbkn1G_zfZK15v=56zu_w!?Ac?imKwXnEfB-4#&p z!8p68i}skHbEJTNX2hz=!t2CYMIv$h`P()}q(oIR4=624dC7#Cpwndh6!fy}#{d0j zW|@N}ZB_&%fMP(Omm1E`h+n=vN<(_T9*z+ zw5t;(ijP<0KPwwxyqd_fG#LIZzB0Hj~Sr`Wy zQFe`AT>s^a)p0lpR~55hE{Ci1=2{GOP~DnrwkL~@c%nWviEWabUbJDYF|r4ANJTEN z`tAR>+Wek@`sdv$K23l_t7IGxLPy0;@r!g8r$g2TV3jd%SFNGz3mlpB0WAT2%#Oc~- zfB$%OF&kRF9YMB)o?+(ndheXt(|2Pf0k_{5N7iI!lGP+7_W9G?*ubPfP=Q|S_C8@t 
zJd}KAA}@dzY)PJ5I{YzgXZX=}(%MdReNxXf8t}2C5NvtQosGInv!hPud`Wb!uO6!O z*U4*?4=QF9BE0_6(9LV}R}rKQqDomAT@=;_xfg#Wc|ngpPJRq~kNYs;C6Dzd5;%U3 z@*~3mN6Iid&pILzD^z%Ze#wVL2DoiX+0_bL`B&-ab8DX(P1$pb8#BOmBc;vy$E%Q7mc6&iV)wAWx$cH<-LJ(XFJ$r0)nh17LX4J%8Ge&IheZ0#SHlb z^byaxbow_OHO!?KkCXu4g232vmu1us$Rfto>c zQ>89@fkjL{Kf}a%t(W8E?e(j~4rt@2h;s6djU}pq3JW7|Rwar$Z7B2jh@J%gbns?g zcivKd5o_j$S0EL6lkp>s&nWfaQX2GD^DhkgaOAi@?r)O~B$bYm%GSVf#x&DdMC#H1 zh*|lprr)A+xz)>pUNnLo4UQ_kuNsU-QzD;$*`Yk~ta%L5cqSLtO7Jb}J<+#>XKoZ%G zAxkrtC!10+7X8extE?e$l$Z;sV5OIRZj>IQLDmx$%Vn}dY27|%P3&u@kIkLvmlJS2 zaENZ6lq67BV&Uwj9z2h~%*IY;b&rc^7#tm@j?7pf}-$Q@z1BiG4}Zs&&@w%rR0Vt36gs6yqPvs z84o-nEnR~RAATEIS(!Xsxw285$Y!O*-Br-|9tCN4~Hzn6IeWWG0 z{@M)sthR;oh&W-peNh1YG)Zixuu4MWnHfPmLxJ;1-GA$}`;`mhnRdq;Bo`(Me8G09 zik%}m*0mX2c$Mxuw<5oH&kP4`8#dN3L~*|o~T?D#HAR$Hywqzdcf56QzbV-ZLBuHi{)8aMQLaWslv=2m!8dLgwaL~-ia5t=yf-i z7S~Oz)gs(2toB7%Yc@ZUwKnOlQ%KVZzo)|DE8xiAjL+W5u6Yon`SB-Q$KnQ-&uG_d zTDf6ADhel+0@4jli(`uao9k3wdP}@@j_~$VHLKa_CG<8>6(fqM{-$n;Zfmj%B9Uf> z!%U_7I_*cY5)cRXY}!LOIOZO>G=8zq>h|*?(H8N8;=Idk!@FqFl=jKE(cs}XXm#*D zyF&rC31&J^IA^=!9vvLgC#P5j9$TtK>l*>N<%E8CVgc3)6-MIaybXL#`+?9;7IG;x z&LPW4lJ)Qsu@2Y6Ca2TXt@kCIZ_E9JHQ=6q^>&Qea6kK7#JzP4g>#g;K?=3WjPt+8 zeP2v<@$3vMI9U_F=GXz>8j973iSKH)W*y2WZLV<@9CMeIpYG|^sJL0CSD~NZ;f-%s zS4@sdUuZWn{ao&I$Y$$#{L(F`1Hd6j15#yzn$VkWn1nnh`RqX~ z>7o?if)9+)eIr;+jvsB&nBHO=R3_;?x#xxuACko>k162OCc2q`meI@xFYKmVr_6wI zdYEa2RnBleUSs#O&UmjlWV>f`OAs0tNAoSLeQ|V4q}OK$n^*h!YpWIb-7c<^w|Jf( zk6&kivsB0@vpOor`#n@9upcr-Oz{f5vnh2Xf5G{9pX6uL_%ap&r#3E55|cus#RFRQ zM_b>SzL23TY4)ew*uE1TpM@04F z#_U%vw1qti;+_>e2M;mHHX)Dn5b}TGy>jg9co19-Pu-R6qKi>V!z>0nY9-cX-Ur!f zvM1BYkA9q`I~>eVeY8^VloAu2K2VdB;hcT0g{Gbw?ZCb;y$@@o7VjNewMY_Mn~m6I z%|>u@2MVq|N4oaV-z)>OmM%j|*>P)DFKDGDe$*FR+S$M%|eCBeo3>sX&JY_IQG+nm6w#%QA?~Pn)ZgC=~zqz%5o%&^blvWw2d?DU8`xM zg5@4NJi#2T+V!h;zDqce<(HdEI{MI~l8=i&)%}Ljb4BPpt>xM~n(enc*V0PgNokY2 z-23<#y}muS)k(8G9bfJZjY9K=yYT!wQ1=e|VEqwCHxKuTQBPFi;?myJINuI9_t*#K zEHm+ejjzY0XZ?ns)(eT;D?HLGDwe(bZyo%oygX@I-R)1LE 
z*mH-3q@(%%tNiG^(%XPU`TjgY(l8K%MHB{|-=BzQ%C{FBKFr*2xY(4V#UXE;Lbr5_ z9;xegAh)Zf_4qgs*C3Re79YmhsXhssbh)O~u<&L)TAHffr74I`nfc)DWnK@7k~)Dd z5%kP{z#obzF;MroMw}#0gf>>#6Gv~O_3eUcb3YUqDk_wQTD6_Pe7GC9tHW=s*x`>e zWWMDTI#RYvPKDJXI-pU!;5>Ifd~2(AfXd^@knO^woxI-W@vXi+akWtA1^4s|+&9-N zUVaeW8#7Ns{~nXwYm)8WNXj`;P{GEoHzPat@#h`FSgydP_NE=($@R&XyGKol3n{J* zMn^j7HMZW&)NB=%hj@L*&UudTCO74{V3}=hvcC%7)8nC#8~UQ`thTcf|I_zN28OB+ zJ83(rKe-e*X+m`^$V|t@7lAOY1I# zCp)G8c%z4t;iikF%1hbL4e*RS3*JsVvCbxST$$Vc?=w*h`!k|?WrO;+^hc1$;cypqgR~mgW@OLJ+ms? z#G)ik$I|n7D?=Le{wX2r*oS_Bi&9B zG$VHjkA{pUk_gz4)P&!jdBp5mnr9z!=G;?=%K54yEZz9Ud|j;BZ^XEL^$$O}>@c-R z_KM5QTwOSUZM~11I@AGIO>pES(_x9$hhy|BF)5acW#0oEX#T2DiHnLziIw;Gca?=M z-r+By8dJZ8BYzMV`ndQ`SRK@X+$g=yx&75XfbMw;3K;X38QM_q!N$ZlizrLaH(GKN zJB;OyGLu%#oQ>QA@SS(J7}iWzo$N8jc~>`x8?N;jtaKCn{T`98h$nY{>R0D~oD>l2 zU)-$DR=du0ToXp~LH+qtPvw0SLE1Sn?L~;n08x2$0%`1#Cuk2Q6+3~lzmgr`u<+D| zs=F73G9#f!*^ZO;J4-)3YmSd+j%z{u^7V(ZHKj${6^J^M_ar>Bn|w=W?={_JFhP>H z+kBg5y?a7N1dUcIf76aynf$TPv<)j-lalYmK}YG46E|1bPYjBms@z5?mVupht?&}F zkNGeBKEs%7=ch(q_SJg2$vX|LxjGLQ_qN@}DI{hgD#-IgM@1}%!ZW|S(6YgSFj8uJ za8lqYw->Z6-6d?Zr$?WD-ZdMmh5DBRS1tsggZb6=BXxD0l4P}kVq9S)Mg%;)*7^QK zg<<0b?7dIjKC2**WPHg+#@-vsMR{5KQ?JTCKMRxnKwH&}f1hz4_vSgcdf%FUC)v{Q z;8%T!^lPD-qIZeJKnI`GiXoQyRC>TLW~an?aw^r%zyh!G^__pCmpQ&sfV5$eOL;| zGPVD>YHpK`=Ogr-xHop+us)COe=~lc?3l)vHnP34E~TnJZVSQt8U*avm|buswvHJ$xqBQqMZ|>sO^|;bHr9@z5RxlEOIgL1IGmH=XNJR$;wo6L9LSx@{MHKzoULY$AQO zV0Lxh`_-e*v^v|K1BnB>+t-N0*uv9%n+)mA!HDIJLThq&v~#wbXFCoqy>zGQV{Ki1 z-MoovFDmXCJ4Nmbzr8&Eh)qwQWp*Kx)J>X0?iXDSt`sKiS;#>jBlasitvK;I@gvM& zYW?j8MqrHJaRxqB)UGcp;Kf&O#1f8>wSlEPW%sX3}iqdJu+e!%;59?tsBhwIh#7YrPPp4+i`rbJIt}$@}~1s zdZg=b-R-we>l(RLI0Wsis&4MmyF<=b8{i}F(TVEerO$__EpgfldZ?hCwU?+$Dab_^ zk841`ZL}$+A=eADlQ@P=-Ng4>Han1acb&@-OpU9!GvGa!yy zK*25jVVCR=EkohEAjGUNT%()G-+IHVku1TLK^~)M`RdFIH1aVML>Mugbq^j=g_6&R1#(ol>w7!7rQ8z7 z5%azgN|yte!jQRG{?UtY?SJ0-*6Xe{F{*z31SWo@M7AuN_~*@~P&1ITo5+@{4K=m* z*RHnXawbD1aCnz|lf#AkY?>HdB!%yOLdujBz)a(!X25T+vIz(b|$L`aaI1lgIyINlAc~MFTxHvvO$)@;GXT$Z+Bd?^fP&e__ 
zm9D5Dgm{BUqut2{cuX@vbmes4_$;Gf|*rtwIMs-J^B60 zN8eOY)FMUmOBlu>sKi*;Fd5kac|BeUygqBgs8`MaBUD3K)Snp#Q^@4IlhI?=A>>9f zj9Gu>shz&3L%2Lfj_s`@qE5m6pCcxs%nsp75&hP!VOQVCs7qlIEEY?1ASOFx)%h*M zMG{#sWRR?Lw7~a#&VgarJDD>iN=~)=F0Ag;(=pPCtp$;X zhF~8UG>WD3>a1iP`pYqn#k7{WpygJzkV)TAi7hVAF>9^nOr9BBZAZVsiaM<8MK3)PS+L3LG*40;0t6eJ5MaLuHX`t5(0)xp;$_e(MQV1JS?UJuDwleLVx)Rb>%P zLOLYVBOAX~I{f>7lD*k_AQGH|vSO0&kn&$7x~W}1^7!-ck2A9HuktqdKVhGKxNUrM z&_NL`v+lwC2EV5$+v=ZR7~-Y-#q@?@^Nw_v&7VluQBCM8>0Gw@$IhuS7z^9 z0eK7lHy}UhBr32eqB!Sg-5zj+QM&$}U3u3yY;aaM+2bp{of;6Lth##qBuhs{E%b}} z$rumxZ#j|-_4ziyD+UwdDtR)+O~y$@v^_O$8X)YA`oJPNpf<@98BZ+Pinzspesmd= zM2k;NrSPL-!|ph2I%;b1{EAX}EWI&tL8!WBWiw~v5Y8TmQGRTk{*NqU1V%UmQ2Wya zt+**3nL$QWhI6ttr$b}JS!$Ijb~|d|3nMPKXwk)#xRM+@<0eMvBkd_hp8Yt0)!)m4 z*vN%)S=N8+zJh@xZMY&L?~jZ~dnMNwePB$A5qD5!<%p4Xu1~ebW*4{BAUaTv ziK$YMASd6{J^#$rWs+PncIhTKX#&wg%XK$9^e;Y?yd^i+I2 zHBab#PChy!U3%Jy{<^gnK#a?)^Ko(wzRc&-t;*l_f^S~{>#>1XS)GsPc{B|K9xYK+ zI9fU6V4CKLijZWN)$d$I#TVduDpG!#!XiFe0fOvJOk*2Nvm?3${ltB zcWA&w;cp<=bRO}c&dPX>cIarn5gjF36l z?W+gf>=Kezly%YroXQu=pmoV~#T3JR1o=RURurhGbHys(TB|1KyaASIT?%koKKnFc zr#*cfW6ta&=f-GMV@D8#XLSz<>J-kN@iYOHj|d zEIh}+jCRre6QyPKI7Qk(;7o8+bpw4$IsP{U+UNs*PxgqsbIIj@Ab%&>>;VH7plhb{ z<{1g$5bw-4DLpsc7H3=c{S~hSba0qdN3uw29_~R)g?nQj5><71^^2z`X;;7@kduCr zDuMq4Twr!b#Y(8q)3mmlB=&WoEkuJqKu zxs#=+k`L%$BbP>rXA+sGp&g^SjBS$S(}8zPc<*lHxUu1*JU%9+LOjN5rb_MjDm>P5 z4>~&Gf-J9ne}bXJUwRO$z_ordqa}M)_xd*cG8qqjrOr3r*~?*c>#Gbg?T(TUJJ$l~ z!Vb8r`$5qOTc7GN=vX0txE%ka`-bV~YZn_oeU~1Cypw>blMiuDUEbLiDQSi^5EbB4 z4aY0rL?P9sih}qg(I55G%R-()Mk6eFDn=niqW(XCNerR;zvZH7r7`WA$xyAz!ibHE7!cv3@(4czKgvn7$Tijq! 
zh)B&Rk`lpbM47l5Q9rQ;yS>5r_{v!bHm6!jCmpj2ar_ri5_5I~N0v+njwsn^h{5*y zCe8rJ@`%OF_7n~Q;Mk9>_hY_%MgJLNauop1p7C~|SG8e%d4hwJ`j5I43pXCKrVn zCC<_$m+yUnD~u};C#oKXCtVUY$zXf>Wj?$Qbuq#}R?1YUX{sKyFb>Fp&PoK(x> z&kRGM3vJ_G9SAA3F>r{rA9eq_8 zE>{YXm`l$ePmj|M>EIy&qpMT?M(0z8o&GQ6_7_xZ#@#`SLPKLt0 z`#8Mv(uWiC*^mKHWqGlE0qm zYs6o?EJHrC)R%c-mW1GeH78T}RnNb;_+%2M$6-{dn5q{7di=K3ad4->fnJIR+}LG$ zPfB5l%kpNtOL8|bk@o_(>?!oeuOXmvE5mzE)Rv#Hsxmo2g_*zY(A(2R- zg2H-t1Nn9{dSiW$TbP+4NNt18{*!Kv|lPE&I7O5 zp_?r&_E+`@r&nYsn`shpR_L7H7jz0aPyA*Dkwq*=*NNCGJSmYZL_b{dsg4M{%uMFC z63YcOtn1;oZz3J%hlzV1oq&G*)9R9)`g&(3xB}|ws^k{l;;Il9$+80xbuc>eoZ@waUA(A`DmdhAUFNzG= ztD7rl?F}q(z#kb1gm&BhfA*Opf+sVgLh+Wk>nP}p`C4V^5pPy=cFW{A{-vqbG0T2! zyY-8xWWqkeAoGN*rVS{>wrKf|6FefVz3&%0SrK&+pILgVStn;&;OPDchyFng6q z^8aCL16w#e6LZNxUEZ5p;XE7$s+mopip&%Q&?X2vxBV%ty(X)9-hWyq_AmunY z+H7$SJ<9~CrCEW7JPLIl0ZT#>UbuP0e%pq)(SeI0iPz8zWOC%x9) zg!aLz!UqoHXJbA)?b}3X?(*?FjCVgo^95VAYe{+FeRExnRQIWKaWJ?Ct8<-8W#^`y zMibMT*4Wy()45tc#l1szVhpi z>Hpr=_CsrQVoZxJLEK)Sn3kErOw54an7jgwD2R1R$2FKMf+{%azja%r7F>AOS=-@z zDN8?6(=D75;XxDwUg4YHHaVshY`f6$dnA6e(XOYJW=Oi6yu-%}ZqRM4b^gdXOQ+q= zlpO^=O%hX!2EdlGkq*-lgI$k=m_wD;%Ke;|yC86RM0U!8inanddsAC|>{UG* zR;uWIs(&6FNl17tAepY)_Q$t3tBW726rQ8{WcJXdFPi9(d!>5Q-0zlmv@LojcX;m4 zn_rArjT?PKA#jdnqdH<|-HeJ;K82^1tNB0d(fef}+OEE%fe=Z^A0&=q65)FymUn+= zjom3Piv>&#rJ+0Ko*60vui`YOyH&;oUFu%kn^wG^6jPSEk8nZjb_$D{A&YljiB3UlIQ5lV zE*~sScwZZ`^EqYnxZBU?7njI>QI)}frFbr39haimn;Yk00{S*ByR%Bb#j?U#nr>YV z8d~6bj~8#~TwGhTewiZ!h4sCm!5+C5T1$m14jXA|Gnfy!AzKrX&QjT8iL-ae_7~Tq z$UHccH-J`poL0RugJX(eL#JkUe`6*~i5uxW2MY+vlR@gXJI@?UnLm8<5d$7bXe67m zSmYI%-rtu0GZOKoa9Ee~#5?G}QvO>1 z=e^s^b&S1JGyZR@^l4gHhm3JphfHqE8wtr&C9n=sf&@a4pI&o-EI9iA-Z-}v*0R!o^ zb9%t{n@wE2bZDf#oDYb@`Ql0EB=INk1KOtpPHc;{vJi-IX#O2DqF4&Shd9vY*{Omu zt+1zX@}(&rM&590e?I6(Nh>myLmd2Y;AI(;FVp2{>G(aR`NWizW|re5%(vW_#$ym}CU^z<)T@+Ix3QMZzg)fk>4X>+}RgDPFND z)r#|_vR2o6f=7RFN$`>|dEA!ODStKQNY32V58%tk@z`vN@!;qLBqmSA%TZ|HudVN- zJo$}%O^B=Y4#Ra?Ss*|wY8fqyU8HiW-hE+%`UFy-@!0%jFW=G$h!0K&0_>4h`MX;1 
z@A#0B+@?xzChWUaAdt)aE?!W!c(JUTGhnK_?!R>oK`@VMiSsg%1SeBDvcO<#WGSC1 zMKwDObbI*`vzE-kqatZYq1_zD5pS+i<}!h?20MEcjN{{~E%E;hwo^h**=Sfo)-R<- z8I!{^n19cyEiWgqh?Y?@;P;QbVCy|=OO>wimj|x@h`FnIV+LI)CsIJ^dpXDdd8fq1 zp}amgf;I6&c$@cUhTbTeEmT}R2EJz+H$Em=a2;sxoC3P3kc!kFFO10qXv4a6m-WIL z4NL#~BShC#iI73Kdfm0pN`{JmfYv&(3d;o^J}+rA0?LxskU_m z;egwcW>7N=~E+rsV77k@E90{kyweRnjf(4ZSU`l^%ol zCI)vbD=*`a#8L4P)-T1*9qJr`8Fe+Txl{&NX>)5uRG=(>BME;7I#VQ>=v<_A;K)GB&*!yNF{{*(ja64cC|nFL7w19a+m3yI z5I{sNh5twk!|*ap_qF6-Sn$6vlXmtav)#sBa}1b6JZmsWrYOYGgm7K&Z4npw9Eb~LE($QjYEG!)zt zHH-}c&Z_8k)J+e)Dci^1l!l6(=0mW1QD43^q*-y^mMOm2k0nN zb3z^m`!|!m_ecKHAAkke>uJZ`Uyw-UG@A-_E&cB)I)3;7a{x!Nnz#cbrs`$LsXc2s z1o2`C0UwjvqbRvU?@AOb>R#eYRZ2~*^j1(O=6qqNPl%ys$i?MB_Eu*@PU3^m@SgrX zTDu&RJHK{cJeeA_H6&X0%F88C<0bvpbir$U(m#`H(0w89`Rq#_$WFiigo_(LROF?q z_7Wg>ON|x35=DSVNhQj8XG_4H_N0cqklKMQ$QAA~bzCZU8MxjTq<=-8z++)RQbfrA z)%)Rg|BtD6k7v67|HpN8U8%$>NhG_jPAVZe=l~4{_ z<&@)`ryQ2!l9&v09-9nv*w|+0_xE{y{`vj>@khFCug&ZAd>rnlL0qWhCdslYc||!p zet_(cf_3iR(r-*R5Gk2dzVU5x%ESi_ZVHa^BS-m;KTDqT9RCxQN}s?#67CarM2^QZ zQnY6eA4Kspe7vb_u`Jx}b6@O}Xf?eZUhAmXAl)@4x`UaER2t?ZW(%=#G0+bW!kSUu ziwWZk;nD#ihh9t^vD;jEQeHJ~l_9OzGy|B-h@Rdp-lZ2CkgN~9kGfVG+X}@gO zI{baPn_3k$dflc`NKZDCWtpnq5`B62I62g@C~A(hdOwRb;+An)r%|K=0E+Fn-h>_G4^%7TafZnrx*=s*P z830#|Ly=~lW_QD}=UJ&Bhp)o0UmIxg7d^!=7j;Qfb~oGcOk~wt^y_VU%x;C)sC&{w zn!w-E=3uv%*!dkJvckp2?^*fu!JQOQ?L_~`b)RB;|MdCKs6D6UuiWGrJSZri8o}5% z;3+NTkAx1-Lm#`4BL7URR@_RmKGNOtAEnqs%bGdu%p-wFGj!b|8(BA z*vxRxDcs)lnrlMAv1!oFX!>52Z+kWyH3#+Vr>{u=#=U^e6d|Ai(gVm|=`r%@P>6;r zKMC^WaciPP@y_qQb74f@@!M=nFv4Zx+R?^sHhfv*uk>)_yCW0D3_d6kf4pW3eUWGR zcUb-3Xy&q(LouEp@x*+IA^OuB_b_@6BSRxx5}LBK)V0~^PtUyluCpi0OOVTtJCIS% z8dFf2&o1I7cg?b{S%o(=K~HwF3ZHzX1*hk)l-ah^kQGJPJDoaZ$B;QwKI9MlcdnEU zoU9vvqcbyG9`F3G1$j||#h|{)V*4ANBw*k2G)im&JgTpC@pf054i;@ zuX*#;^P*vRry z!E0T0hX-oxv^n~p3WS768IeOTv<|&u%nIaru(}w@NDY2vul3dU8vn57l%WsVh!?nh zzdfEha(&fgjv*6ZTH6&v76qZ&E+Y?0d&875ds&@43nMfEYPt zWvh(m`<|oVHjY(8J{SB?=J{|?vt75JN=NQ?$?{Qut_wA(9xhe1s6Tjjy|{_Q*k6E? 
zWbAVGQ%-zqm!IIWUIqeguj|_jiC#sS;ll>^Dr0R$&d3SqZkVrDK&04ubNV;F^B+H@ z?RQk zZQMSRCX~`2=v_b7MvF${kg{$qEiw!xW-F9cpnARk-HyRRHYzn#j!^pmvxVH@s~qlK zX`prJqG6DcjAAbPUq6ZWw;J_>GcH*=vr_1H);qU5S+lR(y&_$1+l|zA(yAS^!e0bV zcg=q;I@z=KqO;4WE%ZhgedAjFgMl*NeXw~=uu=ZI)p<6k(QVX4Lu7vvfH!w~9#9oR z&3%WKgv|pBPVxjHe1dy=3`9mih@aOm&Cw16u-^#x*PcVwLsjaqanju{`C*f>IWBxG zEg4Z0ag$Od{idph=C4NcBLhIsb55O z-3$YaAe90C?WR}%Xm|}__P;7xm{uLE&<7ELJST<3qObmSEy_0aA_eRNStGlxt-+Y9 zjFb&Tzw}h>@w2UaBAT&(mOZ1n9bKquCMDLWXn1i`t~F?!>(*R;-KHUBVFpFZ=(uAk zd8hw5A-O2q)a6lCCUV$UAzg`YM}7I_h=7$gkMue`8|4#tD9Kalo|x@tn0Q^tbHr}s z_diH!F<1RWO?v(C_{aUQ8r^~1*CAhhb{^aQ(+1r?F2i*Oa;bR zl|aPqjJP~ED2Bgy~VtVK_P|1i?iA zEK9^#%}}>pE;LvPu|BS2{^nuk=gM&Iy?C+%CMI%G$N!mgq`uw$1gyfPN7TmJBjc4{gAelNU{g-ZSL8=^L%?Fli# zAY|eqdvD&Q#G4g?x!y9s=2>T!kb+d?_qwv6shoEl*YFE1hT|D|B-y87IWiV^>Lx>8 zUQwAz*$w-+#y^UBwhXA}`b+A%BX1XVBgoxOye7l^7-Ww-U_ya0=}%RY>E9y%^g~SS zJioAyr?l=Z-nSLODqgXp5zck&H_X0YGIIzhd2Cpe7ndp_Df;Zv$3yjsUmi!@oT4u@wLXv&+mLs`#wK@0qZs zOJe7ategIvm>A)LN@zCDo2{^Xv44+8w!b@}{!C-J70M1T?t+y9;s`is#|aA>O5P!? z`J_=cB$2D5;F}d?SnX$!S?Kl1f3;#&FUMz#%J26$Tg`(2XCP%S(so6#4qtpCE)$l% z`CxRi{bSj+dQ4uJ&)OKrduH^xeZEtn>QGC(uflW=N1#rfag7#T_%`a&E6wogt6t$L z6nGZ{K_+*F$vAXedj0JBq02{Vdjihxn2R+&(8Cj|Igr)W$@Mz>i8}`LMJqOb$%r~h zX8X|eBh)UQ#TawERq=U^Fv85Lt7FsX+M2y&C?`PJkde3cWdy`js2QZ zUP!U%R7AP`$%hs3?YIi_a$5C&XX(XI!{{9Qq$jP>IsLzj>{hJePg&TX9V@ z{)@x>>*GYBg1>&Pv(6u%a_Mp|bQ>m$^cv%W|wbeQ<-^#nht}nPTf67;ZWs7A&A_)RF$J#p2s{&-*_Oe%-8IF9<>Bm~S_NT_DV% zOwz=?J5TRF8+p!5og2F6`le)PqZ$%tUAUBu%;cnkp ze(FEh6JGPF>oM4iqG%SDh&~X(bj1(Up?~+OIo)}mAu-Zwc``GXCLn5XnFzSc1oMU? 
zZsPFh_HVm69O5E6AWs)fg7A->TalZEpb@pKjnn1J$dmk2OC24EJz5nL5^dJo+l%TN zDie8noiQo@%A7e@jU$W7eWPN#vU0BpNu5$cIV}bUMv&;y)3uPd?n{I5n$drhk=>dEexA&NMtT2z z`It^*{^AOnSaJ8f-@Kbj0TI7JhZdWoRBPXN@G8zy`vEi%EKY#!j^arCezfYxc=7#N zTfdqxI$_Vpr;3t4`rHT(sp|KA!GFy+*kA}D*?K0_Xl|C1=Cdf0Z z%?&&073rq9<3A1KezqgF%)Pa=s^wu$*w3TWFAQ5XF-A^rOKq94(MP8U#Q_XI_CXr% zi4+o{VaML=1WS%qHCBIIATOs(a$#lef3aglKkw0<@wHx*jdO$zxCVX}+A=|ty zP?o<}u6g|UHdtN#ZUuwXhjRf&#$P8=4pHUILbi8y2hflH8}(R?C*#S%acdX)IJbMn zx(SMD5e&I_VnqCN)SY(s9N`Eg&i>?a`m?BVo9OTuH=l5>^(EHl`+xZ*pW@nYlYjY6nLy=NjEnnCU|a_%np^VhUI(w|7>R5i3oAxq8oQ#({(lK}pGvN_+A;lC z?v>vzy_K(Q3^13L3s)_F@zYRP@QRnDb%4NKs%lo!0y{5N8kh|zV}Lt_1RK9*XUu3V zdV{pK>`A%E@RnL=>k%0w+wREC<8_?Avp)BJ++*oJg3u|I7I~7=`FHL++vKOZ$~V?6 zDY?N+Oump@IiI~%^P=+TUpZqgvvpJ}>&wnDhNxGB|h0A2@CEO;zh9ssZum zVzuYQ)8o@CO%o16tZ|NQhllRIYAzx85`eM5LHCr)5#w45wQip7Wn3Bv6H0SA+^<{f zZ%a5jXw9`y=tU;~9+Y|h2e$FZS_s-r3WkV!EiEM3;&}S<&k1kY+_ddnqB*#o(nO+D zZqgFBao}MwvKedOj5R%la_S2CDe3cF>y+n-AEU{}6Cp3$ z6#1?*@+STlJ2BeBryuuek5DywG-RgL?%w_Fs>Y!&d+S?|?AZk{`4<)TMydy5U_kO* ze#^cCw2L`=r#NH&1UN!F@04EW&G6PDr_$Xz)LJXDO_%b3!Bx4Q zh&;f5O~;Fhs#i@>Uwr*~VoJ=glr-wBkj^T9SMWVH5?~Smu_8+Rh1@!n!D9ITWMt=1 z!R`Ce#&`bDkLg;ul^Bu8DSZ?sMioW@=7vCuV9mcq>tx@f`er+J?oH>NvAGM&)LoH8 zQA+tbox!ftK-PCfw9k-Na4;Dejb$uB1}HBg5zQ7ZRJW;+r_mBw@<k@RVLC7mS zdJ7OUMdw}x#0IZOB1kRXIqp6qU+pE7hJf$7BPVIWXMZL8E@=tAOPs^2H^f&ne^$Jq zGfW&o8_1E12b{D4`xD&0o31vi>zXhN$4f-9XyWb|`n(j5a9TN1l`OK@?j_;CH~`I0 zbOV<1J7!~%>j=oS-jWve<5 zzG!C^S%zrk9zKp)zyu3sFmHLxE?+lSVX~zaDN{^ovw42zDEJ%%u5x_zhQ7 zuuZ3&IUBI->z*{%Ioo#qe^DqvZggQwEf2Lc8Zff5`wPnfYe!5*VLQZ|w9ide@16nL z2U#dY37a-tT{2s5BT+Ucp)1mmA*qrSf1|fXj2~;qD4nWk@(2%Ws%iP6WfDW`@Vi`4 z3Ks5&K}C4NgM!5G;t1jfuDV%vRC^A;_L)W5|ZoiO%?X;lb=aIjxQ=YI2>eb znyp(}2crWZ213tS&8y}55y)X_69FKXpx7skZLLLf$6 zw|awG$D!TQN|)NIgpoy(Lcf z=W6wwz(`792!gY@*R>H8GC(8;q#4Bc{O7!(GF#y=1dL(2mfvWFr;sCckS%O)v0^RH zmLGp?3Sjpghjgrl>bFQUN**{MJitZ@)*pgU6oVz-Q)tY&kzKW7bN;I`ewfO)!&ImG zbMLxk@dTG(A(G@1t`|uFD4vA`W{q;19uAyMqMq#|;qWk7a_bd1^XI?<-D@v^*@5Y#xT?g+f^dDO=22O|%LvagCOZGz(WYe60`uUuv 
zm;)uHuVf3GF0xy4tJ}@i*i0^CQ5b^ob09)@`_T5Cp^il?X-b%qEJ@kw__UMGky?un zx%Wb=>VifQftC{HPQt=Ish$CXe@uA=1bawDv!^$I0_qE?0oZ1cjE(!@I9Qg!*ClLq z_=?;IvjJc1@6D)PBOri>8yxt4DLfm+NRdE_8HlML$g^Y*uq$*Gt#&|dslZL&bO^@E zTqLYhmc^c8MlpHml7rY1so8lgtA6&!;lXi_Y9}FesdwGEipNIejc_vo99#8r&K15kq%TrHiOYMY!zfwz%GbKsbkS?&Bh)MtZSglZ=c z5YMv^@?5LP_rqp;W?gTsKskE{9=BW+&?i)IIdn-Da10>qyJ}d7Vx5}>(HKFf1!V~q zY}a{GJ0@9p{@1=!68M?#>^GXW)1yDM34H5ntKwUnatoi_`>()FEBqY)A+EBu3_w)> z4L#yhpT#GZzUYi;A7A%Syto*M?G^Kgbfvx&+7|tnuhPU%VY^XSiwsFQzn1sABrC>} zH|oU?EEsJ^EgS?0sFaWh)QH#==~3VrEtU1t6*$=!mOTh!1HC|VG}sV;e7Qj{ph18^ zg7pe5p||wDZg$bCDrWP*w;cY6!dkE12sMJ9;c<(1X-Np8N@e*Z!f)Yx4;Fr-1BlASu;+S`eg)-wn)Z3&O z*H_WUinTTORy7til^X(tT6sNqHG(}meYj(24$O-;DlSSr2*s#QG4J#6S|EXPopPiV zwrfKgGVnAc6w%{nycJXlJ&3`UKY}@)Fb0(a{m9D>qF+6DN>%V?^ z%P25XP}l{{f%?>K{=e}a1bobf3i(Ts%td0QDfg3mrV3y=MMJOgOq}Ee=9g1IVob7qAVb@__xU6}@NMiBpV>(X2V+eLG9?9DD+=RgwCBd(_ zrOGB29lYctqV$o;In1<_K!>L-J;+}VK!x-G;v;5KT32=|bj8GE)2SHQ7bpqvkME$d zs>?1nylDm8@@xc63@g002U<0@$}! z|B)IgC%qpZ23B;QV#`@z_g?WZvUNr9UkU3ei+}v9H%tWMt75MvcHio}1?W7#9c=DZ ztJ;*hc#FAWA)7nw0+@0U6ytN^nCqaBsB&1yU$Yxxe*q@I{bWe=GbMxOS|?N~iCnx+ z0&JL(PY)^=FB$Zjnpa|-Bh_O~GENMEU|rEp;;j!6Wp@V6&VTf^-+a`imC=ng$IHA* zP zBf!0=f-5r9ecusUoO*5C3T+}vD`4bG36+{NjT?u1Ld=wOaI-hdxPhg!)wXWDgOWmz z$I%7DD`bbFatW?I;3O?YT~o<&3%bi8A2=myf^1)pyas6|cAmPJkjTFE_t0GD&3E~- z7MG($4Azo)Rb)A7Zv^^!Q{#G_fH3XJWb!vR*LrZN`jMfj=6~s0{jLXi$LZ2Y2F|9C zn>t32rU7Sgg%Kd5wP7QT||cWlC$H_Q`}^1%D*x=1Obw#KmM z({kk2zZTPd#+!}4#6ia>bJ+T?dbxYVf75}kIa^F+ZLScNo{R4NBtc<0ip80X$k;3JrP1Z4gNadhJc9Ud3DYoXPqsVzXw-bX ze&5b>WA&PHDUw<%!s4re8G56v8iva;Z!Lk0&NgpUOHdt)iz$>D*{5c)mHdN_iH-wn zG4KA;SlL}O|DE=2{qlPg^=Zu9Jw+jk6-Q4?r;D5|)2_G=eK2ku)UHGgjz*>YyAv}? 
zKh)%~9{9p-52eS~uJ7!3_@4Bs-urALyTHwdHpy^d68ZAy*HHux_A9PH+nG2Tb?11n zgWMCkDs)3m%8c)WEoo|k$dj%Z7IqE6k>eIhnY?}3&zk7#q*W2pJobMw{U`CzMWl_9 zbTbga$o};(1s@}z>df-@NDmR=PJ{R_>jmp59DK-=(;v(gA1+cJvzkP;eyrz6{Dn}; zXR|6kn(Bf2)i&tkaI-`0B~Om`ifGX+noCDuunn7SqeThOWT)RlR8qT&-qGIv*&>()F{E-;j>xjTYOEno-@m^gwgBoZrnJ4BUw!W zqyjp&3Bq-h?o!wGZu=u*aMk7M_Pf+GW{=Luh#zgCgYZE}8;uo-k=^lj#_#lH$f1M| z4_4#brmk7`nVZ+0zq{bPl_N$$y$U(K&8H)B#OEdB0OHepq>ZLh#{63czjAw}F>#%+ zUumh@$iCX#7Vo383fO$0U4s$Fi#8zeEDP-1h=1u`ZS@p0kg4w>PU-$DmS4 zezJg1T2~xPxUiYCN(Bx?Zs>DU1emnNwm7nt`F~XkJ0_6I7OOd&6Ss(ZhsPt__}H`G z7%ks9D{*T}nsLc^y2Q`Z>TO%(E@Cml&GHt73T32OyDB+D?A4uM`W$h++XG&Wi&$M;h^2W;oe}R9wB8l7j?ph(-a|LemAIlI1r=VF2QDIIo?-`2V zyJw6vw@xEiM1eGdLyB5Fz1m!vXAG*UfCuVtjD@M(4-9A}mtq?bZ`JF)L#0S84mvx) zS*ie)bQcQCKwEF(bjfRsEHtW^EJ#7chiAtS;7k%<^ca^+ob`gPThMiW$fGsK2s+Yw z({Gy+lyLYcszs`ff`Yw)?*kePI};&Ap!p8ZejSU(7n3;VpSlfNz!&>&OuUo5LT!Ts zLuN`><9(4uUNMv2CIwh}6)GttpR4xB2F_t3CQLL$PVbJzKM-4KNSV0bCb!EN*5kc|K=lJmY^d zagRl@H<<)pLUsiPG)kZ7)ZDu?0$Q4_W^Sd_;i5?VS=7>M_s)Z_w;!Ct4>A>%pevK; zn%5m3sQSZoHlxonmV;+CZ}zhJrFMJkQDq)1%0dPAp`ApvomxB{Zt~;Y+3U~#l;++O zq1v}ntV@HZqdYJh@lE&_uu?30t^_qQGKITYDr zG!woQwXu?vHsMxNQ+JNC-sS(tn{BlD=oCoJmXzE$qU<4)&H1L4TQ;xR|6I@$HPdAY zqKieQoRyU%F{r&yUn2+nW*|G!aq0($EJ!r8^55{})cT*I`>$pp0=Ah#%w;1Ur_D%$ z?XXcK^51pdao8~vxWXQNQ|(w#*XCu5A7Mx2z&2yFRa}n`W*(ajn&@#}tj;~3HB)oi z-c%PHCTe=GM~sl(_pA7RcP5zf6Fm!@0kNl~7^&kbM(|tc{!uE~D0y6x!5aUP1t}ej zM(`DLXVH<{i<5I8r$&PT(a(Kk1bqo8+ai%2^+e<|AtMlh!v(Ym=g9*SScWJL$g~9cNj0?kmg3w|}bG+dj)0_4;BT`085I2zPEe3znp} zYeFfvh~l4kbj(?sv2 zClEea7jZENb|E_cahY*Z;*GBUmWk!4ZDt+PEYJA`}KC09c$Enx!N(K z@(^=S6EPeRa~=%22~n8ZNFVv3%LgnG8xvHf+gh|pb3xeu5*GQ(H4@5Z8A&e|Bl=tM z$yT+`k!#lzjxgsSKOO_BpNZ#zGU~l-Z5jF`7zG?EVX7efw-&Z9WNCQOuGhC2H`?-X zsB%p}Fr5|!pj_^&IjHt$4Y3+s%n1u8-K!CICVItUbWf_^jKg!W@+ZOJalaP>TF0~{>rI6!fKm~Muu`-e^pdWbZAZCv4+O(5F2x} z?C9p^@!DU{jt;~rP!TS7L&cU2$c%;iZYS6ZAz6cMw@X=EriR*m&0XUWg|(@bb*l}r zc6G);uMx!k6s;P8aCf3~>2nB@3&KrG$tO?1D7I#ZH8E^Rss_s~C-PIdo@+(3THChZ 
z#az!6h3HpYTK!*5vah3rSOy^c2&M1J&Mxr23>P%uf9`^}A%3v=(iYGp=znC@X%1pM zFVW8U`q0iS)=l0TMidT)`Q0XI7d#->1;5GjH5}2V#)|Re%Q1Q%rC_~tKzxg>xQsc# zno3`IpX2Txy3NRF^^9fld-@>ZE>a}|b)FrNQMjba8C;AiA0hbUT*sy$j~EecCPR;A zvj|&ac9|%AU_AJ;B^zMnd;z{5DnaH+LWmW}cQNPqd-NN4A+A1nD3=>dgfZ4}oZgN# zu#HNT9u8W)o_F!MQ2URMyTgki+en7`&LgM=a z2I$3E?Lvi$Lm6G&`NjW}`8W^$;=mE4KLR(=VG{0Q2Uqq;td@2&mXA)6Y4j zOEx_%nYzKnMR)bfbp^rYz_O>@-ZBpOoq;geAj0Zrj4}w-NMjMYPkSHmwtEDe6GEJf z)&xmFr0gC}mXi)knVTqVC+7H2D4%=M{my;-#keZps<(hJi>ocY$TK%p=n%&St-P@bnb3I`(Ck+6}PP&I6iS2fdG%D*duHQ1^Xm zraEL5b2rjJI$y3-efU69hHjnjY&9R4YYBHsn~;`;ySfWX>~Jp%9mkD z=28*(pgL|oxCsLfn2dVNWKzh2OW=fm8Zi=hPznm#o_RgN2vU6rHoJdNNMANFS`tucd_jPXM*uoap<-9Pd85ah;izzYr2$18eAvE;3vaWn^-HX)LRIKWRwe9j`} zI8mCfPM zEXg{Z>}Nl6ye9yOB1RUJdOZ#raH4b;tl&l3rTtUV3Y>Q(LosDtR%L5C6`bRk4&gR% z^$rp8I<0^COds+2JFd-Q|4@KDUIJroy)vjrwBE(QM3`Sf!A6$6FniKpg)W$ci$g%0 zf;tba!gn2#o$*d=_NKx1N1&})tMTOz!bQi2B1OkP6!<#*t@=(Tq-{a34+-#)r^V<_ zmO4zU_eAq$(=&koV5YrScRr%TgSEEp{l^TpeDk$yy4Ud^r%Ce>s<|I)#nEz=k`DScy<<4~-|fLf3Pfk-yk(Cn#F5RC#D^q}gqf14uzzFD zv+z9c;lnMIKVf6j8vJ(wA@28VEP04$=*uH%A(EDq6EY_jECJ%Wa$bZKTk^5~G@V2@ zSvLBGR`Zt9CFM=k`NH6!)AbM)1qE5eaw^1ZCY?OGMNoUR)>`mJaVeN+{MDkR29C!; z6Zj`N=f8S^*5d&G9b7)gKB=8_a@Jd!Yo?c4Z4_gVQiGoxsrG>hln6X)?Z1pY)iw7# zJbut+&8Xv7W9eJ7CZ+g3!W4+J!AY`7@-gt?oU6_Dd&CWc3nOgh=LWFdzRFSEwIwdo zOW~UcpCRs{3(S?Qtf)j65GkgR2qRV8xbk%4Xsg3#S=@kBjeW~!+-|i5pFR zu!-=lC+klj>X(%B%t7;N&B0BsjpC#`{-}s(kA>1KSDd{p;%9ABt$+|swn7_5i!6K6 zALACaFo_n(+*8{NQBjhF6U|U<$b&%=>$RkfTnno#+tjMJ_AHXhyT-E9$L*fA9->vt zT^a9{O8`6D2c2D-Fmy`wfl_Q^jkJja(*+x0^q7c-CB_Djq)Jn7R4o%$v?>Ql^3G%r;04ylSH@sMHhUWPcj|0PQv16@Cf@C(O$-Hoczx8C(C zC1s8$yGqy-qsc*kxN;LAsYbw{*`u z=Iw!pDR=3z6Q8?dFQG7QM`Dj}K=vMS(3xF|C{KJ=EkB#jjj=f*|8Bm-jXK`=X)V!# zBCGClY$ZRS2{kymrG0E0bW1$(dl`|^UC(`Juemc*S?-0f9y=UVGZxxBm1@!ZhHf$X?ZY3j@a$a_DkeY)sLd9T@tp>+I-kvw*{n|P2TB8 z!Qh17{2lsX!Dyx5#K9mmq^NxmesbO4qw9whC9bM((*PLV)y08FmrrR-)kmmkU@72% zzb4S&b@JZ@vubBFItlxBnB`rFRTL>+2Xp#)=>As)FHP4gK4IR`3FlXXu)9WR)`0E( z?M!Ge)VXiNTHPVB_7R?-Y~H%O~!h;V<7SyK4I(9&mcJ9Wyx{ 
zm+;=2V|5i_tChG=!p05}JmwOX7Dxof2S$1VmLKxekUB{X&{2JUJf8YfK@WTuCPenb zx4xK--Dgb>pFD0kvumUO3h4%114mX0SUhHrFwN^wG42m}#q37`iMX3QSvo5J8dpZbmE;=FrYfM2ymPDQ*C$Gt#h_yxxfDK^m|RSw(`yjFRn}Nj>PsdXXnCr zc)RVfdDlVM&C!VlQ=Se7R`*3a^iD&+jKo|m_4%?b z)hDUVj|f0-~IAa-gJt`|66AYdS|rhE(pIw@%O{@encc32I)J7kz~*}IrGrGdjM#t6 z*R5q)XN(@!%7d4E@1pIHF>9|xBj^DCn)X@9KYRj!DB4-XREzD4`!n?lfSt9zv-w54 zVY4e&Ny*%;g&VUebL`oHM%D-V?}m>m;-%ieVgr!t18Kc$J0jBh=ZQC~f-@TNhR=2} zWE83V>aWZ+Kas^yKw!_E*Q>m3$xv@+t2kE+8JHDoj<@>FC!whO7csv!X$opk@3xfrwW?u|C7o0Mt_zl905j8Mhqii_w~+* z?wWF!tW)1=U2Kj>a(0CKc{!F}Ogp$U-AeYwn;@aPlaP>4BRy@MNGxT%Ge2#31ITYqBU=EifF zF=CaHRgZ-F4!c1SZ>+7@Y3Y1ysjK^fzj-*zO#OXYXY6GdA_^aIKY0^&o~Ta13ReBr z88(mSOY}e0O^r;L-YLjv5I}1$7gX=zk;_(uk~zIT{X7`40cO)Y3=?=uQ7yNS!V~|K z84@C)49Y}yJnR=iz^U+rhR!?UoK@W0&zF%LVKn6(=ef$C3l|!Dc1q0(mBoqff4XjiTMpm&Gb5 zz8VOM>@kp(jr!R;Rle}6T-@AE^`T_@E}6u3eg4*5H3Yyf8Q@-QsRj(Q;(N|I_x|1* zo~~=eWnzNY(sEIV#W?8>4Uy+nI^mg9*0O*ue~pGZB-X<_qjtGWxyTrxw|J@st-@%S zUpA0aw|NtYcIb7&@6^3>ju0gy)Nj)v9aeG~!52DosI*2DbAS7Nw43+y?_TGgb(tfE z*k{sF%OeP@j)<((AE??w@x!oVG}Nl^B0wW6a0ws-RM4ZHBJpk>zBg3qc3ww~Ws*Y2 zwVI|pF6u#Qp?-Zjgo3zX=O_mLy9O#_KF4$nDZFP;7Ny2!xAD_KiM`G~Md^~1aJn&v zkp>nc;u%L1v!PENeCNj$(#J4v_$umhEL?ua&3w7&KkhZyU+ZoCKJEGf+qu2hyl-% zHgKs>b+M^POAKpbyEPRpWlNhrH8!wF&$P67)Ft5y;_B6J2WB2chcVIRJ!i{F(9}C{ z`JbJX{*XKFRS{;#_Y@~}c?2u;gq%||+^;F2D<*f<$9K2~YG@bIORj<7#)mbT$6L>S zacKmy2OCXAu2=X&LnX6|H8wUqLRaPjp-y&1hehGp^J7Z@oJ6zu=wW@(74kRUXhdxJ z>Fzn6Ti!sB@zH;kB8_@*^bXz^2@ZJ_IpPNU!VXMc)jlmY{bLNzLd9v#Z~ce$>ji05 zII~o|)A3bqN?7kBY#n7qh3&vg=f5iDQ)vd`^{EQRSY7OIQ$^AaR$&5MY^9&4|JrnO zm(~8Gum8s79v-aMe3Kbk`?iC!qM>1KWYH>SbIky^0%{4*w>du}Rmf6gD?QYXrtOFA z%+xd>FMZyBP|;hGmEo$xOTF1OpXpd`_dDyYLwwtql_2^BAFY(u=~1&>lXmQ86A06` zlB?8Qnnr;&`&-(r4xb;dA30yL`W2g%{0W{X^rqkn?+dzJlt_VK&p^f@Y=RMg^8;rR zaFvU}zT>GwCN39-Bv~u6zir^vLdb*zMA>Y1pTpnTmQrEfvF zheB{B-H#SD`p^mI76)oez2uo{;6GDYJ%;3s&%M|PR97x6V{WY5FWsZcI3N-PXSTG~ zC;wlb!&}5=#ebf-pX1YWyObM{Z^hNvyW38r@3Bk?UyWQ#D?XG+QK1_Q+;Qsij+Pmz 
z&}r=gZw>W^F1}?{lPq#MM=JgD*-FGG0gt=jkVWJvSHJKbvF3@RJ%`Sk)iajPlWmKh5he9t)*a9cfH{eqi)gkdxqur&$TSsh*ewOtxPlR+kSO6g!R`)KREX(5XOy8F zW*U+(i!N4LblbNAc~F7xBq*W!dw&Ra*ml`x!D_(u&uRl`YxUFUUuWkY&u#W>{nSG= zw(T~24SGY@ zb$b?KZ7iNe2l++yUT&@dOsCrHJFCVAE8qthY$DjTI;6j~i4S#3^{sL6vo_RRTD`Wq z(yOq>jWuw%=8Ongi$KV|^=eGfHkJRkol@bvBZ;E_9oQeX3t}ArLkwh~E+{K`a}j31 ze8tg~O?z8W`2nM)J{avBM&gCG{ZHl}jZY+4S`-6|)1jeuU#(n%`Kh`VWgRfKGC$*A z4-eX6WUTT{W23m2M?a8YdX<#^@6|Zg92>m^x#)PAVw>r5_4hICtG5xtl$q#Y5~ad3 zu4A_3HgCEs`gS&)X_UO)v|7pandW_2`)HmrVc1{o2wg_`B?{3(K4fTh5mw=R-!z8D{ z6#Jt?##0MNbJKlJwsjjH5rmN_Y+8%8!L@*oiFL_8%~hGW*oQHOUYYchGss#q?Nf;& zhhfxIuyg3*yoWPUB#xZ)))MRy6TW@qom|1(DKrOZrMa;t!BmTS{g?gse!Pl2z!66- zF2V9{H6^VFzG0-Op-C8O7Tv7?VGgGquc+)_G+o7@NsrMlGmLMx4<`->yN&t9<4LF= zX_Fh;Upv>YEZIaXN+25sk^ofOA=teuC~w@hUeS5}X4{4zcE81u1r6z*SOuySW~MQ_ zl}Z+ccQ$V!@02l1p)dl)d(hGyz0soEF&=gNGPY*N=A#%HzUwE{gQZ#*XS!68pwyNy z7^79Z_wTHSGcH%t8~rCWoO9#PgWE&s~wNl^&^u595ypMTeS)kUqd{-Ox0_z09S+QzJLF@P3=~v|yldzcn}6 zrg8s${g2$FX9*KyOsa(Ka^U|=%)gv)7GLAmjvjHX>Ra)Tg%+&smEmcI}mCdcvV?;cDYay zGuO1Z6X=m{@sMaF!d{d{NtG5TJrC$CZV-teWvV5n+GnD^A50v<3TjQIQ24CUxByca_{6gJ*YY>qobo z@{5TrrO7I1%6M1GT5qm#`eL$X5;5jBg9j4amm)Map!7wATa4*|d|%gUVbHO|WU0A0 zZI`59H|-H@D=^?cm~{3>u}1s)?SiFJwSFxcYTP+syh5?Hk8)izGMa#8h;`G0c(y$$ z!v4bzhfd$>qy^ioDJ6x4E&3S{az+=Io88|ZsFtT+`*Y4Y83oST3Jyu>12ma)GBKUE zsTeGJ5=dNcTNLDmG45a2{^oN1L{-g3noOg}h@lodA)xE``kxwoEB;nKKY?xu6d(zt zL4t0H^NEc4Pqch$v=W=kIO04=_ zO`ANYH-%C1PKHi`2A%|INjLhTeIyH$daJ`?3*XHTc@g|G~c@NRO`2h>(2~%tQ2oCV{tL9p2=I6lv4YXL3xDPBKCAgW|Km88mC{k z^;_!QNe0d!&aRQ5)lfj9IuDpo9p@?>){P{OPF9M7kk~}E*t)9O{kX9i$90uraYGIj5vcx+!%TD_0u>`QH>JowJY8zPNqX0JMCEe%O!R%U zPR1N)DVTgu7Lj04lHZHgNF$*6D5v$7gHlJ3Vgsn;VOi1!$fhL+7nQajTf2D@ilG;t z5PKIpPes}asC4kWm`B}jgzfjMwNe}C0-i5wV1ry8jMM_8uh{-r^H?zSP;e@2jTe7q z(tDC6L3Hqu69rwN_9d2D-+}sOCK|PQYtcwUGxecEL*C)S+}hau{wC;8BT!r!iVVL%vUWOSdpq(F^zfAqHx|3 z?B+n)XCh%Avd5d0y5`Hxc$xy|uWgK);*S5v~ zpkhEZsB}d*uS!5^+w-OJda^$g5*9UQdHaOw4d8RZ79g=ZmY^$rznX5OJYR@6lupk+ z6p*7@OTx9X?vduMLE>#|OE+A*5+YHTUJwQ#(D 
z?Yd%*PDXbUyz0b%q3Ve;lEQY5zo6v;lwyb&RpNHZNPp|!)?hp2l1TQ65Xj>T>6(=$ zV*uMVQ8+a?moWeeV>}wzE+%)m%|1Bw)jjM@6Q->#qdxEJgVT>+jVPs`7Ycux&%2su zll(m?eO0FQYS6R24#{j=8x zL-qc|TpO^@Y3QMz5eYe?xZY#kR^W?0Xccs)zRq?eXpJmwh_7M5k_P1WBNF(eW}&N|^Tl zn}XTG!AZ^hQZ#EBHcHW94|w zb>o)RncB>d@_ePCMpC@v*u!q;0Hlyj^c=P%UOds5*XvEIx*66B3FF?6gy-CcC>I}i zY)OgdcH7bS+QFv1ykB_LOYwhsYiY|l!^+R{HYFK(;%H_@A6Av*4TSATFu7b_asDyJ zgj)-gtLz6}vB0KOmX?C$S?H+t6!jZMj)Au=$ce#~#EWF~Zf%kJBK-|;TFO_+Y)#(VOB{&0+QrrC@59 zY9%p+JC^=o|D;~zSBIX3F1cvj>vR`?vEMLnmb=ynD8e-s*}&Ga;W-2NgW>Q@zlU$G zeR;Z1340A50ZNPB5%R}=dqsj@G1vmIIrNDWhfD80W8NZF;Zm$^7tRwi^6fdBUiHs) zC)cb<5+Q`&lU{GeRKl!0De2aCHMV00JiYBXp@jVirV(fTa5eaJ&Sf2BZWg);^FA5uO`<<7&P;)OAqXD<&=9j zQ`Eb?EDLwFUa8+jhqaB*m`N1%Z2Y2vKk=MeZio7S*yd;u+VVabgZO$*N??M*8CCZHa~jA`VB$s+OcpH`p7Kd^uw4bny`0?x#R`Z`8vs3B}LV z^(5Oq#xLlHh}(}gATi1?na0+8?JV9+Zu>B=;Pa-k-@gylxgPo6#Bh!au0ixK_L@-} zkoLsXD!!*{)RTbPcptZF8y|P={}&{=c}*&txGIa`#fS+t1DmLgYID+JBJU{$7efv zLSjgscWTOvSeN*{HW-+5F-~VTd>#Y#!x^6Ei6G;vJs98vWG{e=7PRHB)U^dKS{K3c zu@P+&j!2nwv!W?QIal^UevozodjV*wWYsFGIm|Xb(DN)B4Ou?5p9qosJiYCM&}jE3 z_=&>h#qz@4P;%qkjO_w%>S)?LMYO+ETfBv8ItR!Jk5J=-#jux~PQPeD?TOuk*1$h9~#vj6|2c z0G1DpR>?;ulyt5Ao}L^NzaBUnMuXZj6Z|4?^CN#NQ+0L5lNQ_d1CePVkTLM`!Ijon zNdhBy-~>F(aHEGOcSc{-)eAO_k3M3KG0-VEz0iW0LL$5&d;_$&`GL~$7iBU6q6(MLM&9e>9h}3`GDJ{ZgnyH#f4=f; zKWQu}JB9+VBuA;DLd;fetY-D2{Uix2@@;F{H{bXSW_L@(&4LU?s{}%EL?1aEbQ*v1 zel*g!-mig8j_gbngCIG{zLoAJ_%wVejYsLsN9EZCJ>c&I4x?oLpTOJUGOY%b*nzp6 zPd+;mf&bMn{N^++KltzDxDJWE1|=xVM8)`YxMDN^g6A6Spi}VQ9+=l)9;9oo$y$_l z|MVoP0=4eo=XAu84*{*|76(2H&baGnWYOdowM9sx{h?uod}NPC9oa%gu9X}oow^bs z+T{N?MBO6f3fW|&msHTTtvBq=S;8qD1=!K?u>eN=mh%Cly{4Y&`0MwPqZ2p7np6G& z1Gv2u#ATsqeXNoCD`PV9VKShMw9ZC{HC}SDlnQvqZq15RK6J^$_Js7P`=l&V(br4Y z##)Zd$!c=QLRdvDzH?Fr2(Qd(zu)_#bc@}M?_nzLlm?5=n8~XS{HrG343^Waab+v= zk>fFi=NHU|A<%C3&83(0x_is{gg%sJI^PJ}hfgT1{hAGp3nIc3O) z0qkp{HI@*|CCat0+`HH84|K=%u45n8<}0N5mem-FLGvmY-3$ZnHFsm7U4G6XVX);> zGwcb+NpL2xtk%$ry>k6tzB(%-_5B`B+vGFOntJEd?@d4uHYP6IfW;M16xoTRhH%z% 
zDiK1mzxj3)TclHFVzRr=d17GNg#p?V`NKDc41bw8Yknkb)W6guC7|U+UKPdh6qBSI zJcbmDy>pYc;3Pg~;&KyZz6L|p-a;OWOTt$mcu3^f-x3&;i0#Dg;=uj+W}U};9eLh9 z$I}NbHKCfDZ(TKG2BIjKVVPCjt}n;GtF3@zSQmlxai4r|==NCe>Ptk>`}g&`#5~Qs zk)MO>Tw&`CF|7-8bs6HkU(fJv9H+WM%9Ls;U6Maz3}-b6G6fOF{cq|II$7-C6$CB6 zed<-@d23hN@^}aHdX*^LcWMn$_RiycPxPYc=2%qqN2jmjD{cEG4S!i!qJ3e_m^bT| zb~{<*;8JrrBTaiwDyWk;j{Xb06pLe3tzoBxCy4+mNEFYiO6hF~&Cto#d-o~}sPu=R zlTwSyX+4X9?scDky$2K%^Kx9e(cw}re1j}@ImQl`9}{xG_apYm7DHD@|3`=(S?XXz zNf7(|5YF@JuG!+iMSx&kb$sd&?Oc_T5{lM~pEFaODvftOvxAZpS>Xxjvxr6V{can_ zu_c`IDUw&$mdc-hJ)erFl$?D85r%}Ve#&xI+YQ z{u(~>miqhOxF1&NZIJ6RIlUsS`>Lft|EJ`MP4*b?6FTV$I3109@jk95>Y^v2mZLg5 zbHTttQn{*1=G8aD3Vu2h{bo4&;K$1r2XuW-x7Vk86kCLcq|_#QlmEoLkZh)QV17ua zI^|!e-X#W}gmD&=DWr3=;#uAXb@fNKD_Ekv8R?px7;h=t!icB=4VhC@gWL=Z1A$hW zBgK}U6*ouLfrA|_pt!Rb(gf z%Uvh(K-xov)|0v9{U2g^|6R~sP-svA@O}7^iK$KNN$Ut)f9sba$1xRjK=DP= zd}#Ug%>F;$%^NDA{6>lqpET%LqJR9+tZJ7sNE=EiRH8HhL!#*Nk3tKyMWi)9HdU#4 zIHBAq$2^9pVn6Z6)Mmzmkq&TfJwwYk+g}NxC#KyuR-W-G7OIorSUBP5nHIL!muJ@a z@f|Zel;KT65eqaKhONg*FYFy=50!KmZ4|t0h@=}h{HwMu2PeXoCu{TJ_e7gW`(T=QDPD&H*FDCW?WQ-*9TFaJdKY(>PwltJ8#$KO@Yl#(;hzDD& zpGl5&L{E)-oe0?pHdcuJ&9#rlG{I7TTw>&%+CAPI7lQ_cOp0tG`Q{3RglSEbp*;8D z%P9n9uKVh(vk~EZ+kq74Gw=xM$1Z5EbLWAvk#J{Ya7qIv2L(Khpz*g&z#PF$DnwY# z%NIpKUHZwvXa@(G{vSxDsqf67y9V`8?+unnKJo$CBKK3 z>@q`zO?=OjCxxtEkdX-x2c@vytkhsR0Ak!Ju2rm2UiJuN6@5ap^ zD6Kno@38Rjz(||crYDzKX*Ek=HaY3La3M+Z4P#{4LF*hKjU0x&>QEp^p#oF^s&-z* z!Yh}AEy$lf^c)UeKsdI>a0i?5`ikQt_TDxb{e=dGloD$s8y9US*T3?Ekc7bCu|lhD z|EgVbB;$M0LuOmki1#`IP0qQ!nP%P3(e6PnFg!8B7gPPU6~3lu!Yg$eR@7)K_~8Wl*FvF z`vYh-s^0b)$C@N%*usRqWt5HHXoizVRSa=9 zQ_l=&@*7+(oM%Wc{4i&KCNY5L04?Y4c)<~ca1 zo{83YEBf(%W}2Q@Qx9?!NZ8IPNs{9r3tu1W!s$1cU(JqYnx-Gg*&EWPv}E-15fRMf zl}#66X`iBY`oVQy<^x}jQd(Gj*!9Zz@U9`Fz8%hT#+f9(G$kpv!aVfEA`1@beiX65 zsOf++?5SnrW-PO#M=uHs#R* zFXyPXr$v1r)1s6_EV9VRhMj!{C4|1lq}~`?%d(9y)=qQ#&YRcLE@_1~XVwoN4yhyR zDkFFCH7erW!bTOeuD(GZlaG=`>JO-`clzFxrL_l4w$-G^mw~GX#aO7=F;x;-6U=t6 zegz5!^tVZARQJB$?)f7=0mmW4_O3+hcJKXj^z@3IS4*p743AxzPP#IUX>E>|{NQCi 
z5C@|i(GR4N98ziq9P1=H(~Rw*btC8E!_GDiiJ)!qMXrOyo6X)n3p&MGgdcL$dhD}B z1Fm68ml?bFJWs=Yo8H)e^|)4FzUI32Zrse9I9%_{!)jjaVke)hwjz&%V!bwik8G|V zi5!(d;_H%wn8WddyMyJi%NzxYrMV3u@V9I&QdnswT1pP`Jlw5RPnPU_?hGcay4jpy4Kop{}3_Si8Qk{ zP<3LRG3{r~R$Kec6GOkI1ECj(WFuVTt}LI03Z_h>`VreKMlT;8Hv4C`px9mK;H^lm zM;76JcYxiDEtuIl1nM{~Si1Y!X3*hCpbl=@>ae#Km>@bx*ATU!lz;r!odd6jfHpZU zH|EV~*R>#>;=~;>G#vY1wP8DJ!cMmXCoPKP&r`d~y}hYiGX*s9ImOoWai zLSg~W5SB!{G_&-3W{KZzvz@1-HCXEMLuSWiL7xfr?R{XC0@R(M-2o$TjkGqv!*`^h zw!9^Kbp((^J}#a9Z%JBd9hI&|BN&6QZCOdgUuW@BX)S;i@mw4Qf1MsXFTDVx7`7sr z9L^H@7-wyj6UUqS^T-GbfdyRL>9uRi&zrDR!F?V5o0L^CbqMyHM56LG=b3R2ldC&H z^*!-6f7KRu_UE9d z;<&VLH)XgeTCQIOFK~u(uWh};hllNo+|px?@6any(e7@6I%&r#3RlZ_NZoL)yU{;B z(k=cGR@61y^?Ol>5pDN0?uG@O#vq;cZf>}8LhHm2tcf;@iSh&kht~Jt#!h&!#nl)4 zfyp0>Kxo`=58xHNEXx2D<@xyQ?t8Z3_&4{QO_xN@@7Ly zGX|jIB4E?1+Q^ij&T0^?jG?-9UYTBZdOm|lx3-Qc{&pv~LUQ?XVB{gv?M>gEz80fK z|5d9;D0Z5(Bi=npquy2Qlm%yDj-tI2j*dHzAEznQiyyv;CshBo#q`bx`zx<{Nc@@y ziXT{8VA(dn?OhUV(ly`EBzcncsXBtIai5tTc%eOLv=1$>)7kWQ2f1Rt;B|<;-)`ok z_}Bh7YLp`xx>r4LaMc9yy0$jZIEGA|Ien}1WnR5UQ8yQLH7K|C#N=A**yvIkC{45 zVdnPNC|q|T76(v~CZFB+qBq@buZ^rM)Azf25AE4ywG4ns?7RXh403zJe5+gO?)F!> z2)$N$nylzrO14)1(YzKH{*Lkgk4TrvlNnd!Iyn1Eh*Ay%bILY~C^y6$Nvaa<=pX-O z0r89?;4Yz?Uv(p=q=}#%<9(NU#b-WNRw`d{til6O>1MTk{tJgxLoo`~Gv1AuiSjuj z^+_b#cl&^Y3<3&TKa=7dZx=Mf_jFY1hgoG5U7^y};C?xi;27DqD3oDpFLubUy}Hn0 ztxd2DjJ)(7g(1gcUDiFNXk45K6d(a+3JM^P3Kg0Dhxf$mtGR1{@)5}R9!Ff{RMKpW z@H>>xJ~tJLyN*W+z^cOgxkiNWcOxf$v(1twVjyKi*p~gjWnctmC& z;=MVp`vE%Qrk-WgXCNTFx9KBWmwgA9V4n>t!Qtx)*z5SrzEF3)lukQ)oBG-Yyvpz7 zfNgiovSVqX>=+Zj7|U0`K)Y$CmwbXZPXqxAlJp5Vs^h3#8fg?j0tEf-Z`3}X{CZ^L zp`q#9>b8q*hr(Q@bI-_SI54`rRzf~Jo6aLpB zaWj-48p}gwf0(@q3_dFsqrh|_Y0byfgo_q|{*^vLCN=+ftT7;07d{)SN?u#Kg9>cQ z>J@AhG}=-FjvP*8sdMPY_9!dk5|C}!UY&Wd|-Q7KIaE zIS|(yzcrs#cA?AzFr5L_=#@WJH_a2?b!kE!*)V@9bD}N5<$h4!RHQUe2P{!3IGVY^ zS@|`=pV#5fs4{XS(>XT%7UxEO>L)+?r>o}1CyF}VLD_0j0o&wsI*Nb^YEtY!7M4Va zWkj}2_nYtJUg-KGp5(uHxPf@Md8XPg3`3rejy|dBn*8OMxW|574)lo%uuidw@*(C| 
z%mmyJObA6XmrD|VB{z6ucS+Js0&W5a>G+GTEYwKt!OLonZXa!@{l~b5vh5(t9!?I7%=lNW zn)=Zo@Ju!B^04YqSPX}b*lr)BX{9lF0xQW2F@R>AnQc?E)d!uNKT0SwIF0rJU1C;R zwf?D`c9#2Jd64z+jis5Wg2%+``?M|jTfNrkE(D1<5Uu5GLSb(!TlzlzAJdlRQHdPB z(bj+_qPe%|wNKXwAE4W4O>&Ug-?;S>D<11i>RBHB?)$R{*F704u}O*_Di#tO*$CdF z?Kr0+C%e@)AoYAtWc9%$;2u4ta8Q!Fo2h#wSh@O%qKm-BCYZh0AXWg7Zl*tJCPhq! z4!!*6RffH{xAF&j%%6}sdWLbY`20oqJqk+AbF3+vlsAzQ`3>*)b00+6Zv^;YP zUaNL;nq?q^yVkPD|1S63XGkVUDX+yO_8a?QlbHTnakb=ivasI>HiPGU%|NC1lRl=R z^!xZTb2O)}K0!Y-u^P;U{vkY1HzRO;n`#Om$Sp>2kLaTdc!k6Omk;$ki8&qHNGAFnfw9da;#SA7a8n4|Q*ye}?|fC zf2G<67Yv47c7Rl}_LvlZe_Hr^;L$nQ38pv@rsN7!16fr}`o~_ouE6Sor z?+{Ll>ruf0iu~~OE|Xiex%4}^r!%ihUt)}_Vti8O^TlCUpEv}zEiq=nPTU{&=qzBe zSW-+zuEfpyo)k<{ND@;L#j;fh@vbAF;$mE^X<=eG3CK=lVwgc@>4kg*4-A{ZMyZ;> z80u$bpS7>DY0DZ=-JDjc4ThK2W_Eh1<}D&AhPQ*i1nLJ0NNItYk)pv4Yh^Oz2y8ft z>60qmrp%Q;zDD|BxS!qwTuTI)6+dJoHTf@+dzpS)^DtT~JtPbghe^4uxx=@HB(NP( z+7;(MbIqnfbLEd%r7QE#R$cE6-udu$)F;KUIM7v03oFDU)qnrFDdXo`9|2BnrmEHs z?kVWe-0+JH+b|UnH1;_MsAE;_<(>M)B7N36fdXN_SrJ_p;{C`C+A{&B+9k>}D9Z$g zH;mS1Sqf$5aUL1;L$(O^b4uH>%#V*~6GI~K;IoMKW-x0`k%33GPw)_zeG=V#)Obr>YgpDz->ryExT?zbt+_O0n`r}t8nJIl*fr! 
zx*{}bfAR#z5m%Dq@Fs4-5kEx#$?g%s5P5YGcIS2f1s^;;UA~NiR z>gDkjnvgA1m4x-&l)oy=1^NS(<23!P1X?~WB3(37&i@j#kUW(jZ-Oj_gz;pUK+&*5 zH5765l7YkLWRxl%yvL-3dF+}F*B3k9Qh`wc0!GFn47R<&n*Ms}`JW1v&#<{+H=MVz z4zyUd^Y2^Elg^5qtExJY*50<%r&h(j6nKt6w#Tp_k?|(`o$_J1wOYc*{me5@2E1!->Mhdcj467SlVK!pdE`aqa z3kDw&7nLe)LmgKoVH^@=pgh97PAKcyGExH|O(YT>#T{wju*#eWzYS1d%TL98h1yhP zQiIHE3M~L1`-rVfKsn9B>Or3(1PkPWrlLqgPbGfs?YD#$JhI~sU6Cm;xUdi=?;-64 zs=OSF>{#w10^OnQvG91|{C~<8Rj{MxgKIMmGBjPJjAKWr))ebOhSKcvziM+NFkV%I za*LW!(tL>Eg=s6pdc>Fkdl~+?>r2(RioA{*yRllnTnaWyaptthTlymB{~B?D>aWCI z&rN^hNMc2@7#YTDUdGIHfvkbuPNK~4Z+i9ZF1u)UsT)J4dN}4<9P08Y_!0LJ^=URu zA39D~h{RS>f;=MGK{dr|D`FWq_9<&Rz#PeVSvO+d5GhqA(K$z}LDi(7o}hi7!ehO7 z*!xYEjz+w?)@Qx2<=U+l=W4kVB7F>DoE*cezZa!{th3Sm>SnS)D+9E_HSIHA9xq!4 zp@fd&t3v)$%mDx^e%FyTN`BD4_+Pb5mu90-rQ_22ZP^%hoJ57rCwp0K-4|}+(L8vS zTH=2*oNSa0fp<^RZZ72rT%y*S-*C0;aj2SHs8GS;6>xmZuGEaVPyZLceNDb zo%T318mZdzoHKr?@(4vviXgLv!X>|lrJRSSIteQn&ozbl68FQaWCP~@mKt+K3Is?O zhp<7oQ{gak`w2Oopey2Xmn3|2R`BlH-2n%_mZJP*hsexk{QIkou0{)xC&)ZQ&_&;* z&ha(F&V1ohxH+l7&^(F0v>~Hx z-1qpbF6KILS5RY-Rw<{YBo_4H_eg~@>3{EkjI4pmj!)DZREr!;I5uWon=Sb_SM3+d zULQMXy1%)WmcL_#UYc|W9y1d^(au;pbI6Bf47S5&z@7js;u%STpa%*PXZ~-|0!oZ} z`n9nv4;+4{Cx)}!w$S9Dgxi~*!Qds>8Yp}2dk?nxGJ2HRM4})P;2sD`&WZAR z70A;}>`76U#mUblJJU_cZ1O#9n&KR^N`4NHMDrEj(r3r1>>sF+6a}t!&^boGXa3Jk zOLt-LJGNA!>#y0R`gp%%ur$a5upGU~kuMo9n znZmGs4N7#f9)BtQd3mVbRYlt&CC+iHU#QM5y}SZTOstv2>0P}2<+~0t6#K06d=iGN z4E@j85l8OT?TWhCaiMg{;mO|P*T!?Q>Ruo7n@lAAnPd~2(SA{E8T;bwyqpBEGCBz< zv8puKGqW$%p1cs$BsX`QJSJy@k%=p*kCJYx+@6>Dx|DJC!e^aT4$JS3ie=dtz%rFY z&)b}2MJq5n!j!B{02PviKxm_HpZJ5lf@~5TCcOhvDA3Z_G*uf*3}1YSk?X4vu4Gc# z@mDUEC+%N3ITL6EsnX(NJp59IA1|*c9=X!?+3qHSU`QUUpMASJL%q1dp&qAk>Geid zisN3LP+skWyI@_{v8 zW0`Fd@71tdDI=ELH%Ck(_AZ7QqGHNU2U0!Az#a6%OoPLTzgAy%qLq;#ceOUf{8b3= zugJq#h9ajyEmlaj^KIwi+Txx{@J$;a#JuB^ zejgW06dZpY9_h70`t$cj{rO zlL8?)$vBI`2M7$Q{-`5kE0NqwXA}!nMy7(+LCfQ>P0rB{8--| ztKF)F&mKYb@%KGhwrc+*{j@g(Cs(Nn(yoxL8Xt_Tf$A7f?Z#>$DE z`8S1Q_;bk{mCvlOf6Kl_E&O;E*-wu 
zb}#f0CCA(gJI?pi#r4RwCkek1RmHlg9}Qw?gczywJ=M0W-vZtQJ-zzZ6ZYr+06m~Y z>h}Czom?l@z3;{F&Wf1$@+b}~3z79ZCwAVA+`&=b)z0JXwR6#SQ+K!-n0=-A z)^{ir@xbvXzVSznki3-}6_y*?l%5+C@y~uk-}iCXKfp_W`m_jpZY9y#d?U_)hraji zXtk%`fy{(*9oRv?-6>%QJDKLzImSxTmJ*HV)w*gkjeBI%Likuq41gjHP2#O6;QNbd z{Wr3H?g~)7z7IBZKZsQ_)dq6z#e#|e@)+EZgK zwLl?D8YnC5e}q1A>W1B2XPPm#?~+)FfbmF6;}2V6R}no2e~Jvwtmvt#lX6vPy6=_m z81fglrspt$x-^sQV`UVXKTCZBY6{r=|9=P%&c`dGq^Zh8kp1at^*rnWD4^z)cJ6L3 zO2e+vRE5EQhA$Hujb#veB(2wc?iBYQh=5Rr{^vRju|c7M?^9{_Qy4r|ZJBqgV0^F}FxHvvRh6@Z;_k5a{==~C8LwtY?l~3x#-K5VwSE;obqvurJ^I)f zwPv-ZA~UJ+^-W+}W>t|C4H^8m+pV!|x&q0Wm}>M+fT+lS)y}gx6oaq;P(IQ?2N*Al zqIfA7+no>su1r;{dawL7=g4~?w%)mNXhl(_nvB8&J4I-7z2S4q!6X*2=~q4JbvBdm zCDDiu4%KTkmCRC_bP&@BdN`#S7d+P8r0snsr|pXjYD++72h_1DQyvSP&<^fw7s$7S z+WFA726n7@LHFO+-M*4S1|TbUX~3aLFrM-$ONuGRfYKn<>w>Sex_=|Qu>r_YD3V*G zeLENY%?o@kA?CXG2Bd_ywVINkeN?X;JA_+>Q!N@`lkY(B@9>{zhuA|x4B_XCAqk4V zc_h-#7y39JGby5qbN+FF)=Ac?(Bcmgw$36Sq?w_27~xgMbR*=KWbKG}1n(i&R`p55 z?tu8#bO~3P@CQgnV--I^{{yy2}4a8*Ahpc=ve zOc5(t3X5a;?qQ-zZUEc~Xs+}CsfVuRPMOgVwE~*{_l%RfRez9s3;otR)ekmX@#J!c z8JifhnUE+alZ_-%GEzOjPshIZ@S{P#M_E?yk(R$|Wp%$MR(L1|J-e>khsyQHZ2)js zO@WvyXJ&m)m083}P%8+2E1Y4S8>*_R!AlB4Wdvgyo=SEI8&EJzv|M0Z0K*xrIp9{HRYuwnL5(@QRaL@)0S`8keHYQ3suEp^pYLvm;;g(@L$60&3@`yCbPCC zA$bEu;gLA2t-+V<5Y^ukfq@Zyjm$)kPo2Db5Fb%bS{rt874Q!@8ijILiMxTAV5n|tJ zvmL`lUPRb0AQWf6#XiKrn@he1<#@-HL$-*?B~Ed%aM_Mc4iz55Z#d&*lrQ3@u1Mlm zx*|A+yKqOHANe6kEPNk?N3JGmcj|hcXFqV`L@qFfse}9$&pS_a{B$c#zH+`X!;|;E zK0KxQKX|VVu$jDpjE=!QX(UK?EG#eoUXajz!(9D;F+B!}EV5(99l!pNQp>g9D|AWg6eCdr!Z{ z|F(yqZb`qk4Ssl-01>We_k5sUH=JsJ-=Z@OLe)$rJwwYkzQ0U4x*D{+C}U-d9LIi} zo+0~U_G0=*aHb>n+LhO;y+?iG122&qo0I&3^%Dv2O6G&{09WVTB*X{?w%`RTKBJKm z>p&kP5|@^+exth;Hi&bg z`vHEBNJ#RgjWN+>aT}tnw_LGxbi;mbt5@cyx6iEh(Ub(8v_9as4CV&T0AU$<#RC}h z0$^}5@@ln|7DZS%+*se7e%=-g^U_wo+eODvy%r9UKqpMx+C=G5{|(m}XiGi*Mc6|n zrgY7q(=^KBqrMF>e&Osc6a6oo@@yn3`-X1hPY$;y>QC2M{ys z6&(>NK`t|tEqo_+?fXQxb00599SW$nhTP(dK^Tnv(Y*Mu@sYNrtBiEfp2Hm374HiRa721-Nw43 
zn2_-cD|mNMF+6V`IB)xaT%%DRcOA?BH);lit{R0`!F|rW!4PURDoSU9jF0wLO9z6P zQ#vNyEbSE$NlX^IiNY~&sy$GmibIHT?|PT-h>xmud{jGHV@!gQyPRm_O_Jebc)sP3 zm4;&q;+2&E)@AUAfz#^z53pPt-N+I0$?J;a=0_dRc2zm{IoCt04%LOyi!+2Y zA@`}_8nV_f#KyQPsW_3XFM|)X$vIa~<5H%dc-imnf*JgC70r8{rkeUcts+SSS_6-; z(BNaLqq=j0?1pgUg=wJ=vbJ!oXOhhnX6ynIF@SG#=0NWeD)E@v8d{#$s!GHg#UBCM z&yJ=~@$S3Bug-*AsX) z^_|D8P`fB!1at?<_r%RIt<${z*37?lnr}Q~FQV4_DHCtblPO-dYg+C-QBrr!Jd^FY zY_L{zbbuJJ85ji{tr^-597%RPlJjeXRn+^#{Q*a7)>0UiM6>Wg_pheu4~V5jCyz5_ zGEG4LxR%%frIu#tv#td7BGaq711I%)P8I`p&?5`q2PIA2Wib;td_I>>CVQ=#jR((6 z70DeG>m^0}xbp>@l8u-Z%?}J2YDHx2Py~NJug*#H_=v334-t z$K*>|_UUW_b%#eKdKl<8&6NECBvSw4l4Mcf^`388vNg=T%X>@S(u|k8knVdEg~%mI zOvy`V^=wBYPgNgI9}zm}sO|7@ID(iD=eymAO~xf^IvlYMEL6pliAm~9>sU!E_n=)0 z^i1m?P~C!8-AlS%8Fie@5}fO?YkQ54!6j)vv56gDZdOCytt3NaR(4&rn4zPEFO0mtRtCzmd`OznoP7-ghGD*qa zBH5JER~4S)^p$R(xua(+V(C9nts?5~k(2(+8y-J2KUQ6A(}_o1lw|QOnBSkg=rW1>zEpAJekA0bp*U^ z@Npfz?tUmJ6jk2}+`?~6;J0@_9lliJ&-KC%f?MM{6XbLZJ>3wLc; z`H>eX7$9#A1YJMgtEK5Klkhj?%a*j|O=}#w1GauW{3w3|{u*cg-W?7Oz%ZmUkmD)k zx{UqIw+#y(nM35Hzb4V%ViDp7GttB6sNI9aA#!rv#2ao(BHoOfn}A-x3RF-Ad&#E~ zbCb+XpmJ3S1BsXf5q`%t@xyg<59##;*F>SJ^``{N@(Y7T1tepr$cd`kr3L3Z{X>!r z!j-oW&yS3q@48TSL1S*H;!W5#Nz#dt+KCUC{ui9Y^opCB(NN9%>Pk56ndk#)7R##7 zTxs#;tE1^FWg!t(zQ=1lry@X(&3YL>J13rv3zH#~@9nRvHY#z+kttdUM?&YR?d>d@ z{PM$BtZX6etvS7XD`Zlb_iT+`pZ_RDo>eRl&Q>BwuElszQ9q|>youNReTH%xh%dXD z?^+HrJf?X=<~9}9agoG;+{As0Pq1@{GWhm`jyn68-`!M*kr;abn7@r&bC-v)CI3ud z#K(s;Kd`G(GR_Yrt?edpEL59_ual2|aomIUJj@$-z!SGKbUY?1KzAR-uWPtg4D?Fi^uBh1sf|Rus3 zru->ksXwRm^(RdKPTI}xh zr3@Nr)8RswWP=N;I+~1_S9hxZ9qQVwJ$%}1*Jud)g72>j9xv*2bab{)Odn5O;ogp4 z*m{9HC^Bh`Se`Y!epFmOYxp-9>XnH(LJW~_=FQJ5d3}xUx7YImRcScii~RBP$QHE? 
zH+i<*8|>DWa?wX66kpn+WjOk5Q}ZM0n!0k|IAbYZe+%q;45Qo}`{=XaAls_{0Mh&e z^X+Jpq)gSH4&qK18m_xEhs?O|^2n&G$m1NEO1E8KqaPEWOx=|FnX3%j3{E@ak!rMC zdq7)7Sc{*IhKN8MbgB4Gv-ke6Z+A-!VPRb8E;#WjY{x|CIn5UxD78>MzhgO3%NA4Q zbl0?_{SNyI zM11&4Dqw6gyCRkOSzVF6gonbXJi2ywqaVuo_YVOTti!rKD6!uzo3OFDEbdW{NO3U= zEHEE;ldbm`gtS1ZZqE9r#|4-fcIR`f;WRbaSlzaXz zYuJ|lcOK>2x25!~$E;6;9)w(L${019d780&N^UQqU|LoaI0_j`7J@AkwHTZ{E+tGc z*l!dtL7?dqdrga6EtgTZ=JpxV6&Yh0N_7Z56(m5!st62@%rHsDUxBTV$CL-I%^9MX zk!b#!{Ut;jkS#n{k*S=eLP-U+M#@l7YaTa(jS+fQ|38M+h9$CB1@m6m1m#-<12!}n zcs;0EFKXRfZIxq;2m;ENSgKoOX()=%dYTA{$e;QHy~^w~a1oY~!>VITw2OniJe$hT z_0=60WTH^GP(vAO+i3b6&>tYFLCU%GuUd?X&PACC8!L@}MEj^5@oajViQ%balBgu5 z2``Yz>Ap6!#|f{R8TFzj^KO6c?Eg}#tdoz;MDB=(cj}?C6b3+c7TKwSgQy9b z713@*jyP9iWMuT}lX|)N5+|dItC!dOWKO$x6+WyNp*gc3_&*Tr7V{L z#AMv8=cdanqpfrTjZUyJH~^OhgIb=;RsDZVy?Z#6`TswzZ8uvgTarVVZB;gu3OU5g zwr$x`rjqkvk|K#o$Z=-2Efg|aa%N^r$Z?V!5+=qWiNRo;#~4gw9A?bH9Pjy227NYP2HNr)#6Mv2f`A_8K?AG&~ybJEFBg_ z&&o%#8b{VegVRa7%I<#7+G~`vECXxIvV*VWcF{Jum;O^R==VQyCQhi>(CMVIP!nl? zPXG8Z+b;++8reI-fRJ%+qEojc-(L>@)3oTU`9jy$z>8mF+&pP?fzWv88X zW#mm?=r|Fdw-$o=I!7v)Z!kT5`}hgXq%P*wQQwND#A0kbgQfeNklt zFDpf*CpZJ9#{{v#rE}LC;>p&~5YQp1FK6!tyk)(^T8kvE=Qr$j4O05J4bX@n8ua-c zV0xFE=yhflUwD0@Z_f^|t26m}_w30f_4UFRKytMXjG3+=`I`j4O9VjLGR)}V-rc(O z_kje!P4n4Pe?=(WI&6bXwf^|@u^)Sxd1mRmD`5QB3MbEG<}Rn){7*#(aTB@a`{#SM zIe%nquhj!0sXTRYNwP3t4i_Db`OEI_IDgq;T9Y3W{SdA>(iF&2qA&1>4;7Xrn8M3L zq?HTB#6Z7SfnQ^JD(rt-Ni&RB>x1X&T<(8qFr?T8cZF>-gmzZP5S)8cI&KVh)F-5Q zJg01Fsd?O69i8*5=^t8h%fV##Qom|#-00oVZ3KsBE?-K#AWbf5J;VT5$4|?}HBRE7 ziCMhF*opi~p(pWsB`G=2D=w+z(hH#-1*$HzhzrPz{Y`uQ@Y2nTwwVYfY(VCBcasB) z$Rc)NjfNB_E4Wr!whz~Tn)FV|%VGP5_T7o@s#Y8c78ycYKvng_X@1u7jj~15Hte3W zaZJ=-$D_KI#9u1?s^Widq&$C6dHnI-uB`}Y}@ZJD1)U3s4t(%^U1H$)3t=hE1A zdXe0;3mCTnRNI$z3S;}RRS|8Y=&(s|m~~UYG3$Kq8dFydwd4-FZ{P2FH`saqGSLAv z|0}NCk-WbawzI>|S=OR8NeReqwApW}iDQgV$l|Pops5g%+sk2PAnH(K%_+1J4Dt~f zzIC;U*E2mrn+Oz=k-e=_Rh#RbAEn$2=5$gSwprk2i0`#YC7!sydcRW@L8Wm}gEUH? 
ztnS_5XRz^a`Jlcr=hE|1Cv6lOsG5*HWu?1c985m=hxaG*r=x*7vcNTad2a8!7Qp-X z^7K9x=|p7PVJ$U3W-VVnO>ofe0vh%JFY+h_;dp zlG2#UGOT@ctUnLQY-t46g%70$0jH+9v!A1e?qgq!|ajm zc?7Q_-m3I>fNQ$1Ff0dH`^R;!e!OT&-nnV-nSt6Lbxb{|tJo(qPqf;geIu5=Du+_x zztn)BK0szOE-DbGq;5@;%BwYXiE8fguS1rfG*`X{2n@St(cF(Ld@nfI(sak@TY zD^VcDm?-e9RSk2hhIKlu-6kXO%oCj0`?$^r=9@8JEii!CHVlUKNaYXLo^>4B=_nNw ze_GH1lHwScSkE9B`n=Y3xWOe869jkysB+vWQtnCzqp)hLy8Q-me62I}bm58KBfat^ z+)JYD5eD=20yZ@I8O4LQ(s*cvd1P|b$tiB@)xTmR0Z1<#RV1q%I@WEKXN4uQRY3}; zz}}%!gn_{BTFuKka7dUg&MQD}1b~Sz=GtG))0c&$PU~`$!pDVvz4?lxvQXJ}rgz*ke3WiKRgoS>xt;4R2`C`Y2S{G#vXj@tK672JnD_+`07Qc0 z6v?dDXx+E#3&t_#b5zJm5 z|CCD~ZQ89$3_@?P8U7LAy1M3U**_~Zi5`4$%`EYeRcQsMp6(_dk?AuO=hekn+^jQT z@EcG{oTQqJXnzVV`owA5UA76+rNv{Df#f18>tz$_F z(~1Lb$sNoBzDM9lOIb^;$RW8+d1IDW)yv>oyH@ZTs**I07u?{{+H5JO+v%QZ{~mG| zsX~7Ci4I~TYmve5S~aULk1tDOC~>ErrY5hOrRjcIkBZ%%zyAm(Us=pX%>Ad51~`l= z0$hxcwTeNORN-`G(>f)ONcSOH=Y^5Zg(r?XLhL7x$?9}sJr6q$uZ9Z^HWxlBP3Q>J z{@lXQ0b?7K7zN!rojfpDv)}{T!3$yPuv_ewXetdqf2Fc()s8sN;LWgvtXhl@K)bc} zk4Agz|Nc{`C|<1`yWP}AHSI5%T!UdbQLymtZ7Qt3^1|?XVba+Xk&VH@p5wZ(`R zIURags-j#58C=~ZvI|6pwM)f|X79$@4((6885;vjvnNwukY2awZhpnX=EYYX_qvY` z=K@tfmg@KSSbFpa08kz=tvh7LMZeRBvy_JGyT|0(Akl!H(^EwHdwnMQ?&9Y;*xL>` zJexzu|K?;(dOCY+h#zn1sQGtoteIwF`xr_5umGyLm-~TJw33RVRikgTG$yu}&prJwKDNmKOb(j~m&z5Iz_9oaD2tscWRZ z*E|QEg~x6^(a7YmQLC~Jzvw8d4!?2N8l>9&c24k#;+5Lw97%xLu{K&|x1BFc9WDDY zb2lPsqy=5x@<>vJP@7_SV&0m)X$=$?0yL3^2?=c4Y6+5V#C>YLCVdgi#u;fAQ@R4w z`#cK6pC+s+n~KpDXT`kMN3^JNSCb{_Cr9GZ{7tNrG{5h?YS>pl*K$I4mmv zW@49{MjC_p3B4r0Pa@7!8LJ*lIA`_kZ^6@*gR#4V4??9!& zB6lhFoA#l#P`2Kt=lr~nV5t93z`L85>0HMj`O&VKVDsR6`*>cnPwj^RVjo~l+c7^_Ti+gf@Ob(8FMCJ?$ z5t+DiLi}54wQ`006qFbwR@d>cq&DU)zc5}0c7d#DTT-gyJDoFOJ6tuOiaW9P33G0+ z=4A;1koUvDqG+rM5ovLX$Xf?7SdhEGdb0*n*@P7Or7wW?Li-k;88kfo4|W!5*n zB9jc-yddo8dKVZe9{8YUO*W(9d0i|ioh>akfW;YzNm`Nmtw>KCu1iMu zD_K|ID(}{XVNoS8CcaBhRn`MSI+U>{dF<%%k+tx=#lCO64`n0;2{;vB8Iig6*#9RX zn;`<*cgo0nFT^xt-H9WSI0Pm4)q{2SsZ|+-8)@>)FpfiVe@lJg`j1imhMzFgt(6&PB@?g~D4i4A2btQY--Fx1ze>9gyK4s*x@ 
zpa53g>sCOGMl~f{W29kJW9SKgo@up!bE`p zUi21B8joclxZ%ZVhM6Ti{E_BbC1b$`^2XtrzHt?&xDo*Lk?>_g(v=~^h&sSIoYZ8G zx1+dCB%Cj~7#J&`*?qa>@Sg??2Cr@fUF0wO(s~3%R4Mdcv`ias!g zvw1Nb^}Fu`(n|IkJMd}sTk+UMI@!I@#JWg3ypS)ws8lT>uTdpi@h#n9Z%dX$Zk)k=DU7)23-bM~|2})Ta zl=y|jGO9vi4(&XhVh&mxUviQ!k!0A6y~~kpSmWX7kzGSk%A*VpYARo&b2M}x({5Xo ztLP1PF}nOjInyOw;Oc2Ps(^}5Ijd*8$SP6b?NIcYSo8xC(x_XTtz7LYVRvz9@Ubvc zE0n)pAYGwU1@KYz4zX9mwWS}aQdke2N4iCu&Gp#3_5${Wh35!K>9bp|Kn>+5y)bC9 zLj8-k%!nYvw^nY^nG%;8Ent(C#*)#Ds- zJXZef#D;u59C6Ng^Dlox0uh@A*iaxnQ}FZ09`^A6Xa7O1ZI6Fx_3KSKRBs_?01xrmei`x$@!dhB%#)Id}! zq#H!u*|fhR+UQD+;N{RE+yH15iT1y-f%zYI5p(2D_)wr!#xF^#Ro^5@_u$o4hFG=~ zGL>h*xTNuHi}=*l2OnEIo$PV}Nofc}Xrv-=d zox~C1mfg}lQ&K;eksXOa_q&`s{5VcOBpX}HA=t>Ps_5>HX8TKvag&og#3i6cd*Lb< zzOD8z_VKx~Z)_DaqD6(;-H3m>=F^~-VJS!cvn1_iq)DavmribKYMSlL z)5*C96Vd>(>Fg^nrUMAl%7xmvb!^1_4Asya7C94=?w<@M*w?3{Tw&%;Hg(Keut;j-)J zmnZ3Y(GN~GW!m^HbD%h($o^;t#xQ*A!55^lxUImz}_hAMFzla74$zf638MS5nf?dGdo18W!xtKidx3r%7HGH+SK5ig+Q zX^cvYR$0)!@Uv`FMvH!|u&d-G!*W4)?#Q@K9n7i);fv5%mb!7JXM&aw{b;o8>DKs` z6Oku`tLmqB5?4#yzCn>o*>w<-i3jm#SSA#gXZ-}N0v}c`^k-Mqt^6>ZDpe_)UN5S| zo2+BVs1guM0j5Z1oMGtnGKw6@UQbUbx$KVposjOotEmSp5k(?!_T-V8gWluqPoY%+ z?Nl@`7Bux!7<97a8{@(EK*I-tuK(9q5mX37n{x$Rr`mIq>+$siK4T$_(y@eACa)np zux;J#=`9`AjU`gBwd&UNpLJN`Ga1RYw;Nj?>%sT%Q;&b7*v^aw9|c{`C-0|EKjd@A zTCG3lAFM(mhYr*Z`Q@6jXhV2i0TLd!eeBxQ-W^T=U))sp&?LvtG#maKsHEg_WX z$>YJ#M`X>Bipq(d>2&pE%JO5pmu1mP?mho3HC~tgd^EIYHeOX+<%NR2Db4(EPwMR^ zAJNiGAj|2HCQ1$ZjNLCGwG#I7(n{hAxw4Q>VkxJ7)AX!b`jdxS|FPZrpi}i?c-Qw~ zg{mqW^Oa0G0kJ_sUq!X!A57~@UN9L;1-|{K#kh{<8^(VNCYg5=%syg=jNx46X#l!Cwqc zeC2kS$Xkt+8ceJ2I2;<2x1$$SLM$m%@<4J0@F=My?l&Igh*h2(RCpx-=uHx_ijZ17 zu+w^vNg}cU4P`0Q1PE=JPgvMHFN?A1UNU7gCl&Zlns@o3O+f=h6o70llJF5~3}x2E zWSI0un4$)>XioOw%TL!8h|2f_VF`(Qc3g(7E!35R1)BwC!6B_Q4Iu7NjEhi3L?)2q zQ}Aq*=8$H!{t#nmBRE8e3`!icCLC~N2YLg{qMTSL2f?2^fz5i5hZx!j|;KB1|P|&y6cD zUGoA13;!&g`-RFE%OD7;E3tVI2PZgpMm=(>6)g>R*lSIlNL#PR#i065g6Bio%f7W$ zJ~5u&fJj}a+(ToehT(k)wF?Lll@F1lc%=Sg@2|{vxDxq*%86R=&x_OUQcFzY;ikzX 
z#eRf4c?J(trQ&h73amQo!3A!wpMf5{j_%R_k*}(_uYgTJ4~Qi!6nziM*Z0fe(EYhV zHtp-$Dq+Hn91*D#CNa!v%Wm2uhX*r$TCGU6?P@3PdS2T|1#kv0N0>Z+#2Tzh5v44# z?&incz7%WyBqaA!*K0dBNr#Zm(vA0F9X3}@s64gf!Hl9t?H|7|H4i^-`K-X(Qslc5 z_^3T7wBRXcPLllm>%c?fauOX<7ljw5Ep_Gq4-If5Vht$wOV9_YEgSTkw+{K%>x zKf(S-v3fWH4w!KI>@Of?+^6dUY1&ve-Wd-#Z3b6*7VQi>FTLVkMk-%U>0)9@S1<(F zjx%t!qNOTM^a*Xs>1-_5rN`SeXU8^ip`Pp=lrtL zV#A>r(AU1QXuhmZB>I1~=PDP!`C67)O3ae16R*5^XKq#wEKY#Jz`x$E8s}XPGfWiJ zS=6i3=NH{mU0o8(lf{vqh2^V>C$F#M-+(fn#Xn~$z4s*ibXsSjShH>7!g*;NwdTz+jacM)X+-iIq30Wew+qhRa5_7xLCAo=|=1qGwUT z6QEOF6eJ!m&WljDO~n7F@-yaqYJ}ZfzEa&%f=h<$%)t*_QvjVs?Y24hNz@kh=xsN- zla?}1GofSlZIi2(9I2MTeF*GvDrdu7aaVeddJv4e7WLG+bn3qwqBi&^S9`3e8d1OD zvyJ5D;3ICycKc7|a+B*f{ler9zv}3sL|GqsJlgwhSKstTN!N_JrsCWXR-X%L^Aqv2 zXZSwT1x1{~^N1!|M`8p9>D;BbKa}>q@1)IlE&uXKt?2Ji94p=CeB4;$^Rw&iGbv41 zw_ATwo%}8~{E85lWU!0JNQuPn`8wk45o`-(m2f;cRyMR1p%MoDCJ}N|(+Wi>?cj^! zo|J`YESofjN0a)as?>Kk?VN!d^XMseus`#a@PG`gh%=&!%nvc*g*d+EV)H^V`~7zZyH5TJALfX5uxV%7C&R@HWX{ z-YHjm+`;q$S`Q0EPsWzDI&#esqIGmyr@fI!-S=4T>3Y~0eN%kE*EHn`pfi$PJid9g z(BO9dg?u2K4Uy|huvP1*!PQm2ofJz7F3OOv4PARxJDBq4{#wLW+PAx|FT87w#53!_ z7q0KRZV{f}(?;Dv)VnUxLyCQa{sG&sYt`-*BokTH4pZkAV+P1>k!_s7xClfg@BL8G zgEoqOLi4W7W^u3eJ`-bhkuG|~IL8gyGR2vZPkvt9#|y8*`A5fN!d6jVpN0p^6%k1r zc~r2N?IY(9CC6*Tw~{DiwU#5;p+<2*f!pkX2Dnc2?pY>sV%ZMHbXHc+{=IqTD|R0} za+}5dXCJ@XmRhr-gwhzTzGBo=s6kii^(dJ?k5!E5Ll%*CTnk6zoiOq-=p*II2|<;>Fde^|ZJJEhLF50m6>333lv>wtTW68Ke_qpWB2BJ2 zMlN^HMf63hL~_Pk&hhKg_rH5qbM+JQg`U4?F3r3@j=oSE_N;A|^ZTymzpLof9=0?T zcQ)FeFj8_RO?|xRY^9c485-I~BC*z{s{;<%l_Iy%iN3%)A*|EEjtCzK=qMW1pZS^H zp9T>g=yV3tMW1HVpQhzskBd+U`tI23uIam-oL;+S5O8d}KZlkd24QTsC=CrOH^&)y zmb-ocpzn%CFGp_Wa)?EsDDmW@l1rBf2P$j5NGB$o#6q|Cf;!T^(Y04|0;-e4NszdF zo9G&rX{(Iw2m_=yyM(DUnYLtX?Ty%<_1OSz1JB>ygfm0Es5-FTDic*^e|SdU!2*|y z6Rr2t<`sJ&M@OiYA8^TG-Z+a9+LRNv!=G7 z_FhX~1gNdCph9nT#+m>E&sSefiCYN%7&OU^sz2gQoJvh~CyaK_Xe>V%B&%s1dqBOs z9g~QFT@rCoq2RB6gCBI{Ylt86HJdK$nA-iq;H8cq|4~)GzqnfwXk(AB+HNvyc~?8_ z-*71lL0elzExX;%KM0jZd$f&uFtMc6&Y6*r94X5jDXv7<$ZFcAZdblsg<>U~b_N&r 
zI7hfQIg%H%2Wo2rz4LW|L>@*%bP=gw1^fJ4VcMcM5shaJGg4P~?0+YTWacS7N3+Af z%^Mw|eX=u~>;v-ipMjshqY4en6&Lu#CqL8PneAMp-Lef${~@u!FLaZI@^F7@pcC!P z25SY+{s*iRc-^B-JK!n22c2-sXL`LXoW`DDN1`lAa`7qQKNW$VumhVlHr{LcPyRQx zGB#BzB(cn1)uEH9cE~PT`(+oqX1w!6ju zBq*nv4uhjnBO%MU$zDFA&vXK|zC%K<5aK+ApNmVTq5~Eqr`Rgqc?KnZ?~fA(qc<{<^c7isEx6^yjZG zNj>y2>LtZan_@T8(vFT3uBP4Wqa<4In-g(p_n!*6qt|tX)gGZJFP((Wpi?@3N4o(! zS%lYeG8gR#ISed#>~_uuE7S4njI9BCG(fo+MDbmd)sbD>D~l5Ejk^YMUIrcvG9}-X zYj>UrTiR|;d)ki)8~Fg*$gk<#MnX>E!Hvu53oLDwt#|%p_lsDDi1Uw3Uwmk)tzFQ?;pwK@PH7F$SkbG#*s{3nlT9Q25w;-Ms@0&v}?gPN-T>fJT2MxucdeD&VND0c3m7+8H{*h8n}`<@Y`_q+?;pN zZ*#c{?JZZnI}$a2MgF(y91@-_vV?Lassz634$0}(n0gKHDCfF;ejb@YFgn~ed&}=_ zlh=jr(rb4=`Msh=g&KcwEZ7hI5ncXQ>rC`ITt;|aEg>$SKcjP^;<(XnQZmK22fB7! z?0Jn)pmd|q-%N6<)${u55dqu!j+{BEN(2G%TzOH8 zwsi+#VNs?I8q;`6{>a(r_FX)ht(L)<;QWru3t|Cv+ZU{Cbt|OBpp=uQ)h7 zxmYuWKmHYx=d(AGk)ar+OmB z4h_Fz&GSY2PR9^R7hlu!M{L;&2mu-%)CUO$^c?YyeAg_ntW)Kz=k~oXo5JJ{ejc1$ zIAgT&{>P81`(|RV)0;0x+`Lq3TaizRH$n&Agiuy&vpcPw1T66*OBt zTs@lSmlf|!LwY@pcBABy9!GxA*E;j=eN)p=l|Mk+R~{N)5wi(y$nUqZ8jJ+1N;Iz3 z7I&5SqXNz=F5sK9cIOlaqjtWV(`qz&NKmgGQ=ItOA2QNM)GGY`c0x{Z(!2G{mcSI| zdwD-x0~g3xd@}|l*++`LN#~-M#@_G>5^ZP}sdFnv`y>zSsy}Z0S$z_zDRFK?Y;G}F z^xZdPS&rFA!Rpd*@9ZvAESf9RKH`S{;6F1T3QzAeQH;qPFh*~*CnFDUoXHPu$oB?u z_o;h*O79I(Q9^sy!8WEw2gapd1 zAJwF7e9ZR>-~BdnxH`m0bld452l~8PIN^1t#a>dvL?yz18de0de6ZX0P;xa(#v_4A za@bmWf7qnBGh9Yr^OxVnP8(P&D-?f3oN!Dbt1I>nSnfb1DGn{11*ta4qr_wut$Bo4 zaPhl8^%4Zs%R$6?{PO!MXsE?9WDliSa9-lbEbZnkJhd^DU!7BzoUKh*JiAeK-(Mv! 
zX6Wq3Y{eIA<+?SCJS6uw80)}L{~|k7l;2+*U=fUZ?uaBkJ2Op8DQ@1}gy+wiqX7~E zCT=mwnKgQhTh>8Y9ne}F%}RYbl^-BE2Vep@x6V4{|qe+Cd%1FF6hLE)+!GL0u2kqO{KMv%DxoyriUGq9*!5Hs9;wa zW2r=HcgXuaqw3a|U*3b?jWofEHNZ&d?yF7ifr!Q=Jg+m14ui;)x4f1ofl=X`KYU2k z1_7g7B#YfJ*nqQAMrCvvttpR61k$17bb;rrFEIKRf^x=QcIgpigmkXb*3xIU(8p^z zc*)f<_l7j|Mne07Hi~o+pbGCc!mf-dEP0cI>c;-ZPly&{>{WdkY>Rj_Z}n{5K&TPz z+8R66uRC;RZYR?D=|>}=OwyEchTlA$V|TY_@8qAI>L-PrXZuv+F~KX;v8WkxJ8~j$ zc0CdPW4uWn*vPnk^^VSR%Vq2~fDL$5zh2DEvR=HSViRbr8jp?1uHf!{U1N#`exXZS zqALQ-c1{8Z;lq2Bg_@7ro8RBAtdf<(MslLGBzp_)gyI}E|CzJdee_0j<+A8iOZj7* zmu&jrasmYtsF}=3Xd|`2B>U+K4KLs>u}eezfYx*n9aPF8swzxMM{nUYn=s!U0VdTy zPk@cTKJD$1$ll050Q=iza;5O*At)QM4cZf}F~j)F?hvnrS(0=+-fV{SL(oX<;m0pI zVt`nvH2p5&lU|yNULZcaTS^umRpzv4LY=TpO(%E`%+QS6BaPRT<-YVH)LUOkmzOm( zI3(KnTPXH&nB>`};3b`E6P5N-6)RPh)OC?woIe;XS=(XwGT&o?SKca~)fBc$4Zh*h z&C%PG<;iEgEIM#Co)Z|?5fDAA9>Z8A0Dg4pc%Sin%zp0+2D?A9j#^jfhBC&pvmyll z^EP@{al$?oUb?+$!2^Tvmpz1 zYA3IJV*Vb0Cvk5dH`?}?wn~mGx&{~lQiAs2-sGGA_ST)HUQ;cFP>Ci5&CiXm`g13U z@j`uWFS?js@yj!H<5_LoJeS9QFnR6$4}5IRRlD?qxptxkG}1c~Z0>X=>LMKAO1f4U zcN|}Nvl>}UADq`qkgr3T_vRvQ7mM(qo+O#A zX7nl7pE9e?@&)VPr!&{>n=$4Rk-lwU=m|+^p#Yc_on5z2YT$AeQjVwH z2d#(6;-P2AU<+_*7iwd5XOAf;n;~g}Sf5;pe}X-L%KJ}6o4Y^p+FQg}m%fahD&(#% zne-)HBG`AhN$@PQ?a`K^)g6+0R87B79#?s0VU~5dfjgn^5NZodzvll_=^#md6&Doe zeLy8(^^#?y`G{n@Gu!@6eRd~e`DX(-VY*hC@zFpx46VcDWjpQpTx|`Ayzo8hrrzYS zyI)So-V&QrJG|7RRuScX1fyN6=<;G3uAC+5Nwii=T6@pm9+dEL1o|wa4<8{fyH~g= zPx~Dq1DkP@H2GU!@h#z3IBEHd&05DzkBirR4-d~L>hEzt1l8I;>3=H+9gKCWP^kTL zq%5A#f=PZgqnJHtB-F-~Ny^Xj2`}Ozp>!rtt8o&f)8$rn7+mHyoGof65a1>z!!$Dg zRCw^!edr-bAKfYPn}{Q-u(fTp?UUO_4b#*s${o+;Ft=dq#zG>Sh!3S@60Oi>u4L8fJV9V!y_)li{gOvkdbL)~nM1K|| z(j_!bAO)hqw$|@&3ow|%xS6<gU`?k7@3M$8FR#mb4 z6eX)JoCDt_(K=qfYoy(CGMHfGv+pX~+VGE`Q`dd%x0|dvvQ(FueB!JIgCmgitvdEs zRYl?2wlV8{_MG~-Pvwfrc%Axhqc%VwMw?@>V>q&Va6vj>d%xu1qZe9M4-yKQiK$&{om&(=%=|Fe1^wiSv)MDT9hL zjm>8NsqBo)%Q>(bUWW4d4mm(owF|V?x4ri71&1#B&X~G47Dqb%Q#lOg0@a@dop%~O zv8zR3ed5X3hMP%3{a|;Z7@%L;x+EzqGp)s(O^ejix&cWq2#5vU-wW>ex6G=Cj}i+H 
z;buU2SFLvs<)6;1`+(WU5bS8=r-3ozDUclt3 zGgX1vwvvYK*_b`)^U1^9vXS1P@zpbUE@2;qzsUsNMm<(8T0)PYK?*0Y8hy#tx-Uc< zMCWcZpLKii4)sWP6Fwr;qW45cE=oL2Ec{Pp6lS;G8a@^LRsV!ISec*s`!7H(p;6tB zx$VeCZs)NWokja#PsZ;dTJE$Wu&ssN+{Y5Yd_jnWXK<}_;|KmYknEQ}@pPF+6Np&a zaJ;4@t0U*hZS}Xap?lkApi%%Ndo$|vbKgPca$TQpEHM{ecbt10Tm3tt`*5WCVGm<&P&*fc11Ifyq5lW(ARPf92s zGqZ9_TGe+h9>ZPrCJKANmc=I@FF8|t*F8cSR>oT3jy{n3{Xe}huzs?3qo6i5IWWJd z?!M!49w|PKPFzTR6H|Oh>84R<@~XqK>>tl|+&7G69}DEO5G5Sv3F)UXrz9GwDyGW-fZQz?U3nW5ZzM!p2inwb z(Hg*2x8zPW8B2rc&ZD0tM4g0YSQ#_8j82xDeI?0Y1OKUj*-J3-?k)-Qu$rX z`%s))iv-PLw{5$Q`xg*97XZhG^e^N<(p1!Wb$TN95uFM7ke*OMh1ZsBt?_KNIQqpe zi8jd}iUs>N5uM@>J+V7+X~_NXzTP(?AZe_cam~#nj*~zkOG1z(g{fcomy0}cE-xGw z$s$ci1X!KGkkls^mST6Pbl)!&-@#Qfcbp5J65=`G69kllBYHzQz%3Z3H*S;?fA&?+ zj{}O6m7IV{z_!spab?(4x)S~@ngt|*<%Q<`&^UFWV^7bqP^MOiG4Cq&053CA* z{mlNv24%1y?SRaJ{XTSe^Laetv%s@V+{QYRT~WT9jugM*M|-s}mb&e2>THkT#z#3W zvO2eAfFm3_{411wTd^tto+3P|({OXh1US4{^3N6siSfO^4!ub6klZNqn#SZQjWNf$ zN)`DGPt{;lsHZDbYCu~syuTL*bs`aJ(S?q=v+Sj52XWxUCi@N6?1?o}d)Tx_^pk{Q zX@i4ByGIf9i5FcdtwfjmYXnbWY=eW#*}54;mn)3@!mc2eOg5(g&#o2H0ygh|sKlcB zEX7d>N3b`aZrRNvD|R`GBl)|+)hZpX^!GM**MYfKU!M3p^atC3h&R;aj4V zj1s_0{!_`svE@S!-_(|vgbmk%&6j9Ejv29$KDa z2l*QJuY$Iy#K&17hB5>?1h2Fn9OLd%02rhAMcesmR8~UthdwUN5tE`L(hMEP^a-qd z&ik1ekIe@q2cDjm@gU}>S(pG%0~koU*Wj&Kz*3wI2QhU9B~>gE75jXD`uk5TeHNka zCEL^Nd>$gRy6kP;5)u+_2mF$$3j@zQPGS!i$3E?FF4V0hvs)8@K6Pdh4sFp-;fj=4 z;&~y*H)a;8#?|%4Motl}RMWup!`{9TUoz4H;|Jakk)(GTsN2T|k9%;qtHid_V8XAw z#G3E>yGn0nmEhklaAlESZ4@RGy>FxeZppG&BgR8Mqy#5ALpeU7SczZJN;~@Wi+w5& z_FhI8t-<#WKwZHxR^u;{l!nSZf^Lu#$iBua)kS0eM^9~voAggBd>~TXjbo*A8rct< ztZ;Ix*0mO;Zczp%D=&V>^2=^l^ju6>3+)-{_mlcL+E8}?*xz#7qJNs^zQ?K{;t0x z&AyLjUG67IQTqC>}}#B#qowI=7DGZp^zJfGpLiK~CF8OqmU^u#m#PhV|1gZUI_OYqE; z2?eGr?8TjT6~qTJKZy?gv6~sW>DXLq)J#NYn8a&+y%o9QQmAki`4y*kDjQ zdJ@w1$Hcw8lpligIsqeKetC1Wa;6WnH9%{9RL|8dx$P!V?j|nQQma7;Hzx-dv*(u5 zaiRtY^+WE?mJq~A7afMQ}1is7O-n=-k$k3fZ)9CO(&{l|~V zP6gj!qy(ISxsQ>8pSzb!7plN0mEqtjj45%l;p&o}g|0bvKbs5g#)c~I^O$MuIMA@n 
zS-LLLv}L%0Xbu7wnI7)gyN;%UH~Zr*p32k1nRO|CP1`HQ8WP!H?3rNOpk9_*e$Ea& z=AuQ6m3;pf1T%z_8Lb>{Mf{`oKp{ZYZ0bt?X9`DkMm%m(V1EbJpE|K7SmJNPIpcX`PS2zw=w!kGLekNjWpZN zx?cma2>~Wkv2Ge=f82PVeTvp%W3P;pK|U9-u&mkZi-KN{*bnIDF?pXYv4(OvDN5!& zvtkPGfy%C7q$0&pA(d1av-;?lT=xYkNhAZV7rq}Y-p_*=JF37OOO}L9> z`T0URO~l1R^`5d1vt~(lb9w71unC-BMH%0EqkMMu#adI&2Y(`s6l%RZPL5#$-5vcT zaX)mt;Dec9NU>^N_=qwj1{x2)+u*wJl&28PDR$_X4~`H6k-tP#Fy2mwD(Os+47caV zhCkgt01mc7C#36OInks#luZBjj^2(kKf5j!qKycEbz}#{1DwO zX4~?RL1fVj76flrCc#ki%U9lgaK{eICnG<0(O*C)Cql;3xB*dhOwfnR#C0pSMcWkn zBUNo(3eSnpjSK0tW-z&foy?q%Ui#zKfNWp^uA5r%hH|Ln02hCI&RWi;cYtc zIWN<8h(ik`!p=AuyPuA)Px@h)nR^gReR@AQy$j5MMV?6>f=nr&92>m z7K7+Rz&s_P2Rdk9ekJ*PTq;}l6=w~IKX4Ef3|6Yh5lHe%F;@Nx;H1nc7R{|WDAy%28^-mTQ`mZ?y1X8?&S~f>ynlU zXa8c~m*ht&u*J=WzYiaHD1}ELzD}~ZG8Vt@#QolQ{mlXSC(_yib&ewzrD>Zh&SObf zFVu2Wiu}^B$A{^zF&1au&LzuV5@+`rYEEap0ZxUOU=aE2;SK^NU+rUyp(JJ&gQT<^ z7-SEwngXdN@#<4~Z@a&dewfna{#_3w_1*5pu8n^?ZAbhg1~F{C{2U$#DOYMcJvJ-)X?f%B>2KCCoeAV1p@|d}oS>+A)3NIY6NjE*5i~DleL=EZz zv%l;R z{jwqI$;n3s)+)y~KB1&N`J_5(5L5Nz>>E+=?3t(y}wHcftq@M6pG?c zosDC-=>q$F(#ce`7?K09YmV-Ev|y z*aCA=%Lo_Rmd*)Kl1E=*tkJK|NZaTpUbM1_27L>uoTbROfmg>{sIctuiiv<6exNIecju;K5U>d*{#6 z_~0$Nn;WDpxryk%PL?XwenDC4sO^d3tbY4pO0 zE`TKADQP-wO0FNG{|*-bVdL}DK{sKCLG%T0GGhV`l6QSp6`V{X3A$}a-{ng`^1_VP0HX9aHCSMyuBMFj;F@Q8ISRKV^M#eE`O>e|?YgtZ={vm5QkNl*gUaW0%Bu}ro#Sjvjb@LXblQz z8b!K^Z?;!g7!)AtE%9H%R)rUGa=#IA;5el(NWtE&+nSs&80Fg$ZD+8ke%XkqxnkN+ zY3q+*(}%DKfuPGq1mDNnDt}as)<`YP3(O(h1o0}Mom_b>Zp zG4o1~fH~FG+?E68Sg`O;d^UET|*S zbwK6d0ujCA2qyjSl!2cCVL=G4HIVG06FB#3V(v(3u{&geGS*1rpOpbJ5my-t8V4q7 z(tG172Oa_dmE)nChTWwYeB-xTMeHhiz30>3fORcw>B3aXx87gx-nVG9JGF9gLn zg5^5bnCq@2_hAr-l5qFqVserYLgBGUR&&Xy&Z{*fc3KKpLDSFB+ak`CNM9q zFGYud1FsLhzFeB8F%=~yg$YXf${oZ?$=O22^l4DnD9Ae|7!%3D6#GD#076u4!q#?; zaj;k7npTl$^ouHVJA8ju>obpDTp;_V6}X+CK^ksuZ{X|gI>WZ?a-J{lZ&K zM=29<{bt}>Bap>%8%9zj<^LZ^*B;OG{{MB<(Ul`d?pCQ( zZb@>t(@{BcJEh!NQVF@`e%mUckX3TYWvj%J%Si5Q?n}jDnERcXxol=LvwhC*-S_ub z+QWm{=ks~JU$5u$`FcJR9$?OPf9h?%X+k(l6#n+>jjdoue}0FpR)9J{73k9^u~*n+ 
zY41tqi@*H9uz&R{+^nV32&LsSr;C2-?4OBSmLI1Q7i>Kyav*v zLc;FSM*7_!rtKJg2-`!vGDH4D->V}f@!XXEk?yngJYym`Gsk*wc2VqHB}0HuNB3j<)3c z^Ghx9XG|86#*k%zt3|866sUY+kaJ|Z)Y9(f&5GKA4N&q_aIT&*v9??fI!RW5R?KF_ zN^#4WK2FWLCJK&}H5=>h(uRG-;)V~;S}=yFTU&Aei9B_hP6$Fz2@+xHKfhxfvhTzO zIpX3wd_c46?GfY<0e~^Sje)8>tI$?hlmK9=k$!DQM5#w$_buRJ>9GCHc3*Sm9VfsryX3?iXSeMLhF-)PpIJzZ2Rst?fS>R@dIsKZ z5=FVpEPKkQKC5an`?t2|`~Di6yl+ecdIw626~nSk)EPVYW5P5&#@X@GlUC#NT8Aby zJkDfY%p4r;m|;aUcpS8@UQY8zu5smH;?9*r#<|885H* zaHa^08AeW(Y&^7YbNi+xQ;&O_xz>W-@8jz|gMIFh{g!%gkYN~~_~;ykDhl1isPzAc zNs-{Cjfv(?p>`EQzT0(p%|~p1O#&s9vkFE9QzM!&^cdB01*{+Vg>wgmg)s=0{JJ+O zVPpHvIoOUxfW93TYPHITU|AX!vqS-@OX&(EIu$IDt{h1#i%NT85;4o6fgP{sLb*ou z%qNia?Kl3u7MiDL_R$*u8X#08bb9)!YdWdMqCN)(?0Z7F^epxGhw)J%`cZ9Ijda?vG5h1ta8#Ri1>avLnkG7K0?)evii>M(jIX!KKQ|KWK4ZyHzg zIJrDmqpgnm$BdRU#x7$q5x>e&1xPgT#q}flF;2_JUvPs-B2Z*Rl~(< zuJ+iT*W$ow8&K^h8OhkG@>O3{0P*0V2F72JCbNSPhXoLTx1EZb)ka24?n$x9YgA`Fu=BHgTV5`x7`Wa zyDiqW0SW}YkEZraN+A{0&#x~@x^C%dw8VXtQ_fP{4hKDxz|>32+fY9{d7s&mS}&u3 zin6KSg4Ob(+avay!uN5qCrFI4q1)*zYHNkX4#mTWcT9&Ajm_ibDUooxl7VS{VK8IJ zt0}T$EmKwzxH&{42{>#NA4Wn-W%LY=y)mBDL&HCsfLw96^5E`>vdU@eYv$D%XT_>x zZDI|`uBqvqEoD6kJBiCuCF!{(zpd(n37&X9o`hFksJItJn;|TAl*1{H_fAr@2 zDjkUg1y-sw|0Omjd)pbBZ8uDKsrjM+sUV3e_Sk%7!R=it!>+%XflKjvU2J^%9;6OEGo82CD zJ9jEjjFJ)o*{@Hl1}ln0ngo%Nb#uBlon=gp@-N|e zVJ1=t;_v!5ACFAMVnhI_Hu;ETW5DS4b^aJgVa>e|TUXr1LKVLvG^O;(&#+Q=c>+Fq z@ZPoVAjif&GU3iM{y1UT*4*nooEj6i!}ip)pm>)VPhSzQ*6LAVY5pu#VNZg?c3<`2 zmh|l;1;-vS0*b8Un?qIi(-n#oWPCOrHtqMSOxQQM5gD=5NB-g5%|{q9tE4`bqRzgs zgi{_X{c}Mtv9>mo>7Ehp)UKMGA8QosefA2eI%2jdVmR6{3O172IXC7 ziMk4T=e~2RlVwZ;f_fyZLSXLD8+>%y)#{%i+ohM%PD$77d}w2?%J;&1V6+j&ImP2w zmSV4L+;tfyQ1neu?IMQkB>r;ExgR=7wVyae9`|l88m$DB-f|I?cWu}suoH7mg#kIY zf^;iTH~1M!4~9K56d3wA|C+S=ns5U7^851sL*Mv)8*|A<#RlaUwmZtjC9EVzYpfiG zVFw%Crfd_5;*Az8^Y-)HurPNeW*bAsAq#%Lz8j3IUPF(19Opz#pgLaXj9ijjOwoSh z(XF`=kh`5i14*#&Z8EPbc_zdgwFO$ z8(vV1LwLN52+!i>bmVHiyfYAV?u^>m*R_wPKLCX8!S40%GIvYHfcGLFBk41trH~Nj zqb?wm?zd)`wMF==gS1Cs2Fvk!#)(O%h=N35A+tRM9K^IrHM4wYG$1l=Zmk3Vluv69 
zkmlugjsQgAUVo$0M|U5Gi&EPOezqsbN`Hxprk*)#exmWxx-51Ll?EZxqZR^mgC9p6 zi7>HwV2d1&6*C~83re5W-^t=A#s;FyMu>y=YeW;i2RhzY-uwFAzwdTW2M+n{x#Uqk z6j5RdllU{Ex2;p#exiGH;w5BFi>y}B{h{sn;|3zYxrt?30RYn4tc%6W#8W) z`e~NVnv6lv3Xk2`Pm+^8_Nj?bLM#J|>+b@#hj+S>-zp26bg1Z_`t68!RW;*KFk_Ow zHF!Bk@z6)F$gu8Ff|x(b6(fezEjWSmmdq&PI)BdFsG)YxbKXWs_g zip?oKBLCX`bES(aptB?x6(p_V85Qzx3aD`lEDb^_La$c>t|ePlKj}CpWpSphr5w5) zMoWp}55(GCKO`BcjVgoDs4DNcQ0T%YGGwxehAuNd>xRCyMRuKa|qC2~8UF ze3(WqEe*)`0*3-7XW<+vO5Lr+u29vyV_60?v5$);4Z)o{)KV{tJ%?+oVkiIAU)DF^ zc1rm8h%OYFh<&FLjgjeKs5pRBUV=%*0Gvq7JGjM5>uJ?43PcGF)X+E^xHqGjgcq&W z#$Y(EmlS_ju2z-0akXWN`MG`O_G-T3dAZ=OMNDr~CMNkdG7=C4!9VV(EAQLOlFKlX zP5@pqH3T&fi0QH~9#qetmX>Rk6c^$yqy&#T>y;8RDHjf|oZgArV=7O018<8omYPAV6bseb4KwmX3nzS$YVtOF`XAkF2+_TJ z_~H2sRd09{(efNx(i?SaH1^z~S1rGL$tcN)8cY6E1Pz4ze7TU(KH3sU|Kp zN*tI9vfQ=<`+)e;yG57vlPAu4&xf_+xh}*KPPl8a@Dx+6*-B4@3E6haH671p*~9lF zbi}$%qvo(}{^gDJdT(l=&7|JRONGTl$OAEdG2yL@19#b-I&R+I{fYO8_0a(v_En;X zH(F%7z8YhKI*>b2E&sx&K7>zJc_ocb2ejstnqB7Yns%}ux+pu0eHz6F$Ii2|imFWc z)7aV)@f~A-jqRBu`lCix5l_QMh^^QJKHXMyk41hzruG2c^glzJ6>y|CQWD z_(L&)Seb-rp7*BMQP@NpmOREZiISj=ZyqZDpd6AGNs;Ox+CX(AIYsW;2(kRsME};3 z@26hfSm%L|NyNoB%qkQ5EnGsuHkO|Rl^2?*8sE{nL$=(UWP>FKK5-+pa(b<8Qc>`L_iP#w4?A!! znS3y!p@Uyg`1~!sdNz}o`|{z?oQO__(goF5yMVc!KuVP>!36&>rrdDxcH2fSyoQ+& z3J3NKqkV`c|I2YD%QP?d)QvH68KYw9Qw2^IYr9jrB$~M*6%rs;a6?OB(ty zAtRC4HdBGfS>V+JQP2Z^)mN7ySP z2XH>lIDsuVBw)aYhK_J}bywf!NC(PmzCS1;C1AT+W*@*T+_nC!2PpP#jOGL~k>1Y> zKG#eE!Uu9T7hs&fmdouRpz5@1%)Ib4a=3!s@Sn(gAquQ(YqO3ae;Y8SS6S;>Vqq*! zC>K%j%T-Dkfp12Pm_hsOeE;O9#&4TO_;fDG>UjCh2FNe`9A6LmFRtSY-Tp_poY(%T z;$onx`w;Y?+xc_Uod~`Ax~u6dyVjYs-r;erG90?}MK7P<6Lj4WxQ&cphfzFBPKm=H z5dib#rP$J#Jr+T@3wFY%DEQh~Cica0%>RCgad_z{K4HFlt7b^@ize8O4eB+_SmTI4^GkEy{VlSHf3VaP+3Rb5adC+ZZ7yH;yFrsLzF7iqHf zRYkO>P-o(k2{x7UQe8{8^};FTz&vaG;Z4=i+O;0FO&fW>Bvbr`d#X`#M55k|xmk>+ zmW*QY>DOi1mU|vAbXtn%^`%t)GeLLLsmGRdruOKN2&%8_XO$O28Mt?_vjWM5y zBY!Q1>+73rNedOAZwRJ!K*FTh%6(%?))! 
z4Jhq2Uigey(r}e)Taszjqwl`II49}kU%ER&F{i}N5l>v2bOzQd3VY&PM-T9BFac+} z!eY;w59^l7wA7p@C}jz<$;poQkTBcr0F15dqS5}8{ZMtxVNd{#fqldoM9dA=IJdBM z62y8D_SYC5&mxtT=}eZ3(NnQi_SyRAu0OiG_aELD6K2w^birWBND9Oty@X#qc0U-3 z!eAOfnCRB*j2H#c-g7L^*+AM#b?-cACo4-%J7I3WiXwp(e4VzK*^E-9w5B!T@iyfC zK)t>Het?QsSjj?~jJF5)e3p;jGOYnRG6ZNs-@0nbn0-a=LH>YMQd>e2U^@iInWZVj zyN9UG+ybDz@%PP)gWeltZB?f!gz%dMxAQh z?qqtOa<=2HZ83#mEp#)14KTmyhkUtOF`hkjHs)YpOJCc+)fsK7n=3q@P;Xyu)gV@R zMo*S4G-nHd4rHTR>~#BGNB@&ES7ZzRPH=iM-*ziV59skaf>HGzE={9^rE$XsSlwNL zMPfnDt%8Sz6+>ge{4xcX4^{l{QHv#CVyS+OYu9@lhddfz%#CWRVXd}7j7L*lDf+-9 zD3mS``z?uxs}P!)OFu`IwN);kMSa4io{fONV~}jE>>4#$j_zA3Pq&dM8wjX)kU{Qw zQCWKl6VPC4twYVWX^kcB1`?P2w0WTDQR&_d2jMQY0Dnv-O2aVk3%B?3ejWZ zIbDDZ03nBJeg|k-w-VGJJ+zoLt(D`L664KEdPXbn?RYvD-^FQxuvU;R%N*>H%+Q`A zrJ;6|g1~bM@#k2CUFRWE3;#euh4&-iT-hoAu1`1bj3UPnW(+k!iEU+KB;K=*7=u<> zW2BT?d(a^-#n1XB6ztNFm!?i`Ebd`8=&6ffj2Hd#KTJDtR*DJa8KIZwwZperc!%ObTfJ5zsMk(mjdW z4$wDHMEgLy2q#^6XW`G>4Pg?>b39Y@nF=cqb+h?$79Z{WqUdZBNU43WwYODxw@e0FXe6);zPyt2 z1YB$?p-C9|F?8QGPt)teiLgZQQXGZV6YQwEiCO}8cp2&i9ozY?Av-{;hfO3?Ky#vx z*VK>vR%3ixNE?~aSwdYc-ftti+1Z(QBx$w~h5t{aWIwgkI~=-?#3qQt`I9&$M)Unu zJRD2svqtHoxE=7l-2G)cs{ZoW9t@A`BgvVu~ zKzsR6xMM9th_V7xf1SzZ2cqOf6dC*yJdSAP@M@Iz{yXG?LL+E#6amc>I%s(cHAse( z8nCGUL>!J$zs;OY>idYlG}^n=_yz9%J)%u5<50=Tg)GGV@N6V*VGPNIjctRCM;~l^ zM!B1~#}mM{Qt22RzMM|g?e^w_&UUNU8bS#pNC7mkm4N8Vs-*L<>wzZ4KZJZIP!WkW~XbAi<3ge5~re z+y2+C@-x2K1zA%Vb3#;#(8nm*ir#uSn$6pQ+ei>?-L*Fw_Pm!u!EIh_3_%7cDt0Mq#OVnDboYaO02fzm%_NJnThpO}%jN z=mwbqR_Ui4j?K(UpKKFfi7g+x!HRMIcw(!5>;6lM`N?UrpK}e@9W;=8qW$`tlq-vz zZPf=ydT{$|U@Sl)N@-JuW>vx6U>M}4nTh9ph%`kc#hCA@+345Ra^;6ZS;~StO^G8P zehttZ(EGUmB7ij&93f(cDKQftKwChIp()863t=;#svNMpzm= zNcC!qv_IZ3VJjBjb}O`xA@w9x>LB*|U~;q(rc>oH4<%u3dmuhU1Ehu#2Pd4dD4>|F z&P>$(kf0^W3%}k=5*q#|Qm7a-y0spuig)IGgwDpCU|mM*=GM2L5!O?GpZphodoF}8 zVG|q6SZCN&Ol350=+8m$Ib`~P=JTBy!dp268Ra};TdH9cj2x`BuhwX^53^$-()M#Z zTQ$C;@rrFIwDDo1I5T+?5TIN>wNX>-f+lA+{AYCrYMm z;6HVrzFa7tCm-tO^ZLv;O)lY?&t_%Vg10xt#dmg^sasx4_B3<`RfM`2HHbdG0x(?x 
zuv{j+{VIr!5_}wk;tM<}%9GAqB?()0V=g}m=~=Oc zNc9>x)yd2|R$4%CfnbsRyS6Ju0x<&bawppn(<95OQ8`{8N>Uy}(>kq@`e0|kv&1^F z1E1jTD2r{TnQn*5IVd-Z1ohZb^@RkU<)5lI9?NjPlBoy*rmOzqL^qjx$4eFSdaHj!i!wp%lOrg4&T^ zG#wEqadQ1{+**`sG*Xq*ZKY)6ZviDHbm6)eV5cQ>k=R5OeZ@hguOPEa*dsw_pjl9! zFXh9}n6l%Eg4nuWQozA?$(M6}2>VTd6EXO2nOZ+IobsL+k&b86i~T3!%{5AGz#QW4 z22Pnbv(RB?Q*L$IR(Y&B>9OnRE_=y;z5M)~?&9)fLRZ!78K?QS*lhy=t?LBCQ4PSl&gJ1P(pt?EE333dXZSO#shEi&|& zj_ZUqJS|1|aT7G_INkGa!gb?GqZ;*I@Hz3bC^+(cW7L?@lRO!tZD+bAaRC3>zHcPd zM2nLqnV5d^@67*1bYb@vH^c72jSMzSqN5=gT4`&X?UB6MOT+PEqo$CkD_d=5ERP>L zmNF(1u&bSh{2&aFdxcNj!Sb(WD|4#}Kng+|i=r;P$uLLkyHUq>Y6#zQ=Z^x7`lC2< zIj1~^32AS@AFhTbGqPl?go43MGh^V~Dl~UbrxU-_EN4z*N9;7c+ZaOU4}EC zF8PaWpLn~c=P#A{2fL?eFKpcp3o}C3_Q#g7|0ZP1hN?Z*)FAZo*<=KQ3C5@I45QM^ z|4bh)sFORhAtJ7?Bs7Y8tc@Ty+I7kSJpl_Nb9S0H^C0E>QIt#T-H6b#Lp-T)pdQi5w~1(s?Ruy}L3+k)8X%R;91D0wbBV za_lC0rpJ)eqff#i2ZSd;HMN7^-UnBXFo)#DrU!Fn5X_E}Fz9@BRSleVjC->POD=yBQq1d#S2CSpJgVOS$G38U5GDW5KHE(7?C{Gy~cU0e$rVAKdQLGUxxV+g3&FwzOdQL@e>;nzL*9xJ%nPw{0Y{ZIZoFx=2i9K~?ie zi2;jLU(>-Y`pV?GNi_9?dHC9*Aqvd~3TM$Qk0lS*4G*{$89#-5lreL1=FAtI1cNWM zUYv(#&O|~eH1EdOc^Gy|A1K*)_WK`f;db7H(Ysx%S0GfAyR{E0U_(H}t9;h=?52a( z>&*Z#f(0dxPlCHg(4GGc7oZ9}{^ti zU6%wL7U&c40a3yQfNvPZ7{KoHu&kr2J(JLJZapeKq{$M-qJ8vL^cBSmMTp7}iP_ns zb$(qjd`&yu0?cmkH00Ch8$uJzo_=~O=1uj_)}j^i+RERwOq6w=u1Y6~wo zcgh4WK%XH3Ozp<6ZL$oavx&O>A^44sTPWHVU(M^!2`@ZFkj+@0NpJ*L3`iYK-+T(= z{_zwdg5%5BK0Pln{Y0c7Qm7`VDQb;GS|%3kJx6GY!zY0mn%~<7rX3${V|r}7kJ8IN zZ*F;OqW)f6I;d`Yol7ajS{=?mLy=aoi053pk?w7ni z(Ze&GKbVH*k@>p)N&uhTQgq0i^wG=6gw>05^z)ihahmx`A3Q(Ntp~oLhyy`@koE2ViTZ@DU>OwOXKwPkMQ6 z?x*Z`&IHxHOyUiSbi5n0p8gEN*G&`PKQ)atKE|dhRG!Z{)6w+Rpn;FcgX5#P=y;4; z)nKEEU%^s%F?U=V`J*)WKM~kwn5;?ELBp_>QC;>+?@$p~R8f z;oaLpY`AvD5!8Lnw^$RM)S+2MvuOhHY2d^$)}aenFM|mM#`Uu)y|>oM%eZF&`{KW` z^l_Z~ssr|Fm!7m2gKhZj)qqzX!RnbiUN1P7ob6sDa$Rq=eSoSG(A$=7qisgF?r-#2 z+>0%^gKJa7+|850?rPpPX_t|O@+!u{=!l%jKVw8c@}fKCi9<0OGrmeEqT_<^SJH8! 
zCVUzUai33Jv~4}fDLR)NJ-QO&Qy<$==6BNtv9WEIYSEvtoBr5H@n&|b@Et*;Whzfz zV(;QmW;^moH3=Jx=Dl`Q&LY?9&#I2TM4(cNi|@<9+q)5NN$~mDDSl6eN9JZM0`>Q@VIr-eIvXR>veEYzT0$@Dt%&g)BN}`)7zLJ(8L8nNFpum z-@#1&M+jSpB2TZSdZZ#IV&O;7csuYWPD88QSFs8;IX`(t=GigxIS0;e+V|gIJ~wo? zuCd>r(Cz2v9~&J7%nAe5wq3vnB^e{1b`kABxql>cd*9zb+mXlnZ^j8lo%%!3%dz)n z9mvOgFQoRS#c$vj6u9cV<1-RZC;#{|Ze~s)R;|HjJPH=iSJxms`^Dc!_PhiS?-8^O z2TFofh%J)~pzkP=7!4+W;xOR|u}Xe3d6ftErg9Ej4Vqya@Z>E-jMZ57-+0x4}C<7>t)evP&nK%(B>6iU=tLbn@x&P2?=fpe`+Z~F`}rXAi_ zjRH$##Afjd2N%B_>vbHzy0A`kAFr6~Pc;m-H-2lRr38jD#tppIBxn#B{d$tfHOXl3 zoHF1a`Qii6J@Zqg+4il|IIzG4ge=IaB>3w?r*Wgjpi{I;_~+gF0vpwXdP!q*6+I6> zeKeOL=>-n*;z=CuZrPTdrD~e`WZmKe6rNJ1?EwEgon*XqVsa}wgv+2Ks2FA8ZxN&I zDQKZr+ub+rVqh-MLU8fi5{GTPFdk-*GHXm!EnyB}OQU^JKTmam*)O|g#suBJ%^Kwg zx%m^@(dzCw3zJG0(6)TFSL##j@^7NZACJ?rn?N)ziL+1f!c z_e8>FI2m=QR!p+F@oI-6>GQTadq`+=RO1$k@@h!W-6*ahKF~7lf zN*LWjSvqu(uD5#p%-vZV(!bGhskNZpE{VV^6PVwJci)UOb^ON|hPUHv_UMp~627x= z3A5~@*@I4{9j^r;V2|kngU@G1QON`_^dvc@TtyqexE)9X#cxge9v@@9 zt3cLvfLB5OFhb+4*n9l8vIyd zs|)Ib9SS9;{3oISX$UU&a}?^*Ix+fGPP^m0_Hb#w&qePON2p!U_w{K}yqp{jPDd=$ zrtXuT3an<74H7hEV~CGFpY%|1nl`P*WwZ9r$MZi2SZSV+vzI!XdDh30VQ~U~f@skZ zWNP>DyY@zKBT)u3#kMMzNuNG5eAio;$#Ly1NN!!oD!JBn3t1W8Dzp>gc2nAJWttyz09*w2S$bnUHt_{__}1EJa1- znn`4>o0N6=s{R1GSr*@N?{!>FKlY<2mV~cD-A6Ia(Zk$;L=cQlMf4}XE)aWR1#3!Ir%=D?SLc@|Cg?mx7PrY7%I4Y* zK$XH`Esqq((8db=`-(xbnCsMS!*;iiY@O){l_Ap8a#_I@chYYY$5BH!O5+FoN;kc= zYB{i^^W|RucY;yjaH1t_EU3Ol6`d~fTo>s8yOQFO{EZy@qz`sX=g8^Ddd)`?#}c_FMS~gRNCaj4m!Tl*SOR@ zSQ?1@-f6In4Cv z^>z$A9``%O>p6c=qZFnVfi5nw*D4$ey}djZI$YU9(B~0b;k%$+yqtFB!>LBydTrUL z5kK_>SYsnKR4Ly)QFUG+t1NQ6$#8-A3auDHbYu~qaS)QeGOv8}vI~Uc?S0=a^sMhz z3_q44KGBAw!vudOb1VBGt%$TMJm5rpJ)6-yaqwVw(d6i}{4Xr~FMqEb_PEd&a(l{u z$(jCI>#ON)YKu5PFu~-B1dxY+ptQ?D~UN{dH zwDHExP(KOfR4O>A(B+Y2P?#yS092=m5Prlp=2e^e8!(7~X8_!v!gC+W8;NTSk`fBW z-Y6M9S2UA`OJi(#jdIAP>gx9>=wqMoesh4Uid$&>QSmzHnsBop{=ged8sAlkTdMp} z1}5>h%21S~FmCfIs%%bT9*X2{=pI-o;{GFO_%{aFQkah0&bvyt2Y0LHMu!O=cIwaa 
z9dL2q2an>b!HUPO`$Y0*0r}00_|c-cR-vtW!Ll}%{ils)VSQT9YakOQ+6JY4Ag8oUvz!W?^^J8|!x=_6L5C5blHWXgznD33xM|y@Afb?3SZ$4m&N9)Jc=h<^0Yn%4qUH6)n3+I!}FM%wh(quaWem`7b}Lm;PE3FYo< zZr%-5gh%c-;0j`e5&wxCB^veUr&gc8k$*YoU8cwEz$inb!9}v{6WPC;TPIX#Mk-FP z8%FHEe75WmAr}3sp?yp!j$dLD!Uz8oDcY_C_$arAW#7Q`nkFSd$^yCM8I$39H?^6( z>NjW5h>HGmG0*w_=$fFfEzrcRZ+o_zF2z)C--5=!Se2G-2_^VG3^hxi)t^Q~Yz;6Mt;-`C~l4`#RL^T3Bq9 zk>gU^yl$n|ifyq6D)`Yikf&%26+;YQ`Al4vDWaX-?Wd9ec$rvT(tPf0jR9A9FRd#H zJ^{K@V*u-ZPr`-CxpZ52JUcoawr*h}6h^aM=jR1A*JS?%+p?TZI2jWwbUvpPZ3!H# zqv&|VrO_sID)5GMC64rjfH5ZPhA`x3LwMAk-%qBB7zMW&)wQNx5L{>$#XKf{MDvc~ z1X8cQw~b^x+IC0&%+^l0dS44E8DZvF3A#)#krh_i`+sDQ0f1%qM^(b6{)l-Giun)l zXw2K}T=BFNejpg$yM~Ud-$fgV(U=xqIdhansQY8aJM-T7do?vrW1|OWB;OE=u+LfF z^CRpPY!A?1{9Sz>NO>Ob6W^6Ro0f+DD{2RtvNhTX9?J5qFsH1*Oz~N4H9j#z%A`B6 z##Y@i_Z)E;RPd0B2iEj;i({j+tse8)cO&lOq+FJ^I`0umCI=k^hdqKUV=j$2-ls|? zcuzCCg`Q&Z9qJcLQ#z*YWk&F(NA__3Szh_T7K}ag>-?w3FgnV!qHL9Hmu+$6mBr)s z$dwmSkTDY0LzHVgz>0Qf1|*qMOO{@t?wXnE?$*>K_Nud}ail_I)e2l5HT9AB*`YaL zw`Osfz>Ly(sjP~K^_rI0=k`30L9)(xne#=$+cJP;qY zG-5ws@{<3WUvia(my*LDKhQffCZQz#$izaw1*+>Z zjd!)|aE!Xno0WMoJyzfFoEZct6`zG?MiYIGx##z(rDrxg<&UxS!%qtD#`Ozm`(EAd zzo%}mYS8NN2`flq)|RMse90zm_kE6D3D0*vUn+bX;}72PJhE$2LY{5K>{xq-RG;vs z2?L&lObe64F~P#}psiUsF20MHgk=W8I}kGG*fB~x^J&yBy3;nCOG?EfochlOcVZm& zVR)V9fxBn7dQ)8*9u8hzI@od7<@bZOA6sN+jX4IUZK5U@-FIwT{m~qA>Db_=M2?@n z%YH~c5^xZ*^g&`cpiJJOdcmVsbhS9^j$G!!S!s}apr5SMtzD~^KQJ%n*>cCCxLDm< zOrdZ>?O=3BpyQ9*b^4x1QFia*QO+pmR=1f3VSp6a{i{K+_dVfHd|w(g{mOd=&PgZX zXF+)Ir%?tf8+BNnfI(TbpM~}`|d?i5wYKAEy&zk#1P6zDp z{p_8ol{TX#IRbQzdec&=S+{o3JH@C0R}qx(W+G2@a+$%rWn{z%*xNR%CG7&f?JITm zL1?$x3aH;=y-%U$-X?QcrWq%p2;>s2dep4HkBbWkSlzt{CyVL|#z}CSWx{ixIH)`mLx>Fe!)DiGMZghBMeMkJsLaey>;kT;@ z5qOoYZ64_BcgCmL9CF5@3CBn7y;2p#dK1Jc77Z}Bo4F6n`(MCerN-a}*|{uyW2|7% z*bkztJ{m4-29l+?4R zKh%0l3TtkjpFPN?y!eZ(DD?{UwM@hUDylnt&|+nSR%2RK;T2hL<+<~AIB;~${74G! 
zM#6HhcPylTnhmPWwbzkxnv_gxpHWtkJdbry4QKmp^wFsH-|kUb>-Bw*t$A9NiJ;bB zp)Qs*7_HGpd-F4YoI0eh)U{|NG{V5M%07>{)rt|}z^exjn|nzM`+VgBNk@ChB$h}H zxU!*fvg0uN$M!gp41~s0?7cxa+oqh+@8NXX5N~WGJ0C{Gkp(?$OEs(>{5xoUn`!hO zHLwl4Sa3>#v;e$?G|Elb06gZ>PBiumSf2fJLFGBAVuLiz7;JJdFPHqF|0EXdWMQ{N zn~)xP*9&JOCmjkK!fgXG%bL)g5gAo-6d{+-2bvv_t8RI6Mt>`6-NXi^X#g@MiN5o+ zTu&9H;7K?|na3>|H|*Evi^H`LZ0bb4rLW`mtbkSn6wzGHEIY;RUZ>3_L_n#m=Z5+O zrMvOrykNDC8l)2Q{0oeCg0}}`iZfn^BW$#jc%C!3)N{gXi*7Cz#~#B2FF%Nmo}E<1 zE1ia=!ll-9eVcA?;SLFY(*bgP8oLFbEZ$T+IZiLX*tzQMZ(Jhv@Gt!(Bk|~<31VHa zCJbk%onV)FJ>;yibcTvXtH-UaKwim3f0kq8MdHGd>Mv=GyP0-;-`0RVjNWDnx$?d> zTUF*+ha25{n(%p``m?3Aa< z>63CKqH=zXQg(!1%O8tTw(;4}4VFn~o_BB%Qar@PvIjonY!g*MCTPDpaOYwTLAEB- zy#Jv^bZ{I!HJ8(Aj`HF8Yfq0&CaOM3(Ay^ubGMXAaJbQ5p3(Nrxx?vJInfEc5QV#g zw>5OVk?EBzz5mlv>3+Lbz~9RxWT@^7BUB|=FXiRs;eGF=UH;(kbG##ED(tj@AX0_3 zwlet@I>u84hmvHY(hOcSg4i~qmwe6-W!>+G`fO|lGz-7|C$g<0Z(5Z4hROYMBT;rL zWr~{bXjS3*GGb?ih>b+>%NDsF8w*pM*rV)h;H*i(A9M2ToCv0}Nv^aMHmRTb^^QTj zKkNsyR-kKT1CxyV6_#aXFWnXEA|3xaNZ)xw``qzE6_UQuMb7+H>dWWNH(A$NPX1?e z_!zwdUU#~AA11I5h;}?(;d{6j99T^eJFGpYvF(0}BR(0MmuIc}OY;Ftt@`wwAWc~G z;ry7AbW>!HHs!a*%hs^ve8tqLR3jROM&<8?0MkA7miT!Yu0+_q=zQ$KVurgZrE1)e zcBy}Dg~X+Cf7cBzrbi+3M=e!%rs7PZyte9j72CUGkW9ynh(;;Br^_v%_d|<&{VOU) z&?dcw(xFkF!p7^an5)w%pCUW8)V|h!${YkHlujIUF*wQ9ea?25zwG!px-Z#%y|u_< zI`?i_<|)DVEGCSLR67@fnznzw-(g!*ty&@4JG8KeL|c|@!)E9V1ycocnBh)?V*_7H zjfcYwf zsy>*C2U+TUmHtQB#I`N34KiHl`^xQ&=j^vPaVdqVwU)4>&jqOjKST@U!Ly*=Lj1DcG_JvwP<_sO0@ORH$f z4Pdqjf5dQ`s!BLz{KhDVykydVj8)USo}{n`9u_V|b{BZ^(eAhcYva}e?{(i;%CIhq z;fa-Ot7xWhC#ahz{0b7gNc=Uk)Xl0;(mFgxE!QJ^ z!GQc$Xzd#LPHBg7+y623?tx7A|Npk0QrV@V>=<+xOMCYd#m_xL`gc#s3p>Ka-6Z>!IF&@RM@I z)2sQnRSk=DPm%pN81fz=oNFjhvL<-o)kN+h^6cL4eI^r6kk(rEh;Cdk;FHY;*__o4 z;2~{qVu?#1bfMKM8N4;y(my4zCG#89q9XG{(8_MOiS5tVEh2{z@@OEqhmA;G_FT`s za_!bJV3kcb3&{gRnVeV2th7rvTBkq1E$v{mW4ew&RJMw0I}0rIIJ;Z$l%RMbB3S;NoEmhWYeI7Rbqt&L z+1(RVG5SO*#2XG7SFo7yfA3&&8vgqv5NP6%phWHqbh0pLk~Ts>MzAFr6#L1mLc(+O8bQ z-V*$0>(%#lFjn0F2aEWpLS`d>WseuDlgBCu2VOf&V@s0r^H*roBapTnJw_ 
zl9m0a)`EYqW^P8ecqa??a*jV1llf@yw6=n&bZMSbFv$Pz?B_#=A? zNQDUq1kpD163JY)moD?;Q>eN+eSZmq%r>c>t)r5cee7a%$$boA{qIp}Vuucr2JOA? zRrm4>`01#8iWG*17jHf?8@FF69&&Z_zpZLfYm3U$V~ zY*ZhMztQ+J-wzgBAJ)bjUR2a%k=9ciJ`zuZsNVs82+sD*#7(5^wApiMh6 zm3f&-RL1j{9%57ZP?l=e+%GPcdy}4Vh(6Bxh@aN2zXyBUp4h4|aYPht zsOAkQ?aFA2az#BsX}q@Yq&$R#!K3&~?6Qzks<{w0XB;7qm}bR)`ByYHY7TGSw;WL= zISb+OGNs&Ki%7qSwpS42IR&;FhAiTmkW9i=BeN>$@Ytia?TFVNHo5S__R^}S!q_8loJs*epp!#Me&mevvP-%FfdE|F2f?{lk^v;3!ZyxhO$3u0ix zv^$86-x3|FA0ql7VS@}|B3M?R-(K_vj*^rksP)gF$|E-TB1jEQA!gdLEVzF-GNh{NXB-GOxXc&h-Y;TzNFb7ZPhRn}J(G|-D1f{9 zoS}E0UQ&!OCnpz|*>pk-FW{Rot3_C(_*v&vmtE^BtXs2eNBoY@DX_!9Su2B{5r6hS zKasxOGmBW=%ZWu1gIaK(r!)IHFP0###5I=(h7FCZ?hNB{pqNn3#ktpqPT?%bTpG-~ zRhqYAvhC-Vr*b!Lb2&;@|LLlP`toZS>~q+JfhE z(%?9f&00Agpd#TTKj)UnwI0INmE|SK6^eMSpKDh73}CUz4BR5tdRAt)`_QUBN(kAn zih3E!hexolWuO}gCU7K-d_mSb*86ig{MyKvJt4F6)sDxtjSUqY)Kn+6NU&6snN=NY zx&7)-yQ-*ltx1CpM|K4k%U4#Rd^X5d_o^B5o}dyygMfM|5A$xF*boP+y4G?|x0WgB)&8iCI<**}cNgnK+P8egy~Vx( z*f81*@}{+l>V*^;=^Ok{aV9mUl*vz`uzwcvJZB9glL=O^WVR%gpL$%N0Z!4tiTTJm zQ}gFR%V=!%nuYZ=C)11(cAASYBrWsvm5M=`K4KHfv`iGIZ@Pb`SyIkvF=-XuaJ(X) zj~R7$#}oK^UFTgTrtQSJfdNA<(t-8d1NrW-VlH1Vv~`@Mq281R?6#Gj4Gq zY2AWw#1OzREteX$%RSWZfkZ=K<_W}B)yOTrORA?e|7Ru)>t2x~B%J^3QWQ_HwDBmQj=WRq zMskU>F_!L6N6oB=2N0@{zYu=q#g_uKBLXlaOaDiJZ1xNrjlqBfOIP!%2 zdiX2`-*<8IuN3uDXCqH|exA8+@&BQL&r(gq0`V;bg+p^k?T*FxCuYC7ir`*|q^$Te z&ffErg^D2Zi`b{3Sg?w;Rt9pgi7)izeRw?3-kHM!IeN?%fnAovwaBnpEMYQlwP0sq z_jcE(@%Bo6INr@Lh)itYWWJMN&$*j}(_^K?%KCrl@){$sibGR}L4y2zuoHpEazfYaFoZ4h!H$tfkySTXvtc5dkUEW9 zM8}LV_z(c1@;=Fe-G%^Pk;tWqBkW=v&?*R3pp7A*CdjBeY!3gRO5!Rpi*JR=!C%c3 zQlDPEpYs>eA^CIWsw5sk8EWpfaU;<;r09a|GoGO|6@}SFNTK)%9Zgg&|BQE;Z7enfT*l`t$168OivZV@+Yg|I zyW8+1s@jW{V9f?1d$@-rmd?NgR`mJcMWjS}8LM)1TsLYwMsnlY z_XBN*jtRfmgN}*uu81& zkD(3&4cO1+F1kVG&dY2jH?NgNOdg7S5UzE^d06|s0zH6g&hUaK|BUr-25)vhX3E!; z-lu~2YFC}Y3-8%W)8MJDa>a5>>S;MuZ$f3KAS|Jxv2?NJlJ3C|;Hdl+odghnvrXKT z_BUkFzVKnc@oJU5)TUdCAza3)23*5FRBqnP?3-UNz8S zSmL@G>8`WdcpzRldV6 
zdi^$fJgbcPWCK0UiVzGFSTJl)IBr)^w4M5M-5Jb1YEYoRe#`TZNqY}`Xl!g~pXt~{ z-JO~CC?;TSpFjB?@GKYuSH?38P&u|3!#B$IGphOM38#{`oS}%X&)-+lWzmycs3!;F zBHpTXwY{*XK$|@;?_{)GY_@+;fjG~%_;5q`(2}|4)c0tYKJU!|Ot*Fv*sGbS#Ll1o zECy^VrgWF<+g9w*dF(|-|M-nawa}Fvtdc8P1@`~cX>e_kc~_qi-*+oz5#LjfP<#%B zjJ31E9Kp?ZmEwapn_?cO_!K=vJx0{k&j+%A_(GjTregHaRdT#zU&OJXm*QI^fND#o zU|^qsubYG$ma06Bsfp4&t23!@9zi$0uQu%tRs(bt-!y7t$zRRS7E z14@1x92CI2=WGZiq!Lyd7h7#s00-2P19ISlqGFHcc=gSfvQ}R;G&^AI z%1~SL)L-kXd3`_M%}I?L)q;}$wx=1`BTJnPQJ% zI~-*c3V?(NcQ6WRK~6eGYOn_-V#3_s@k->TB#JLFDMWc;IFcJb91OvL?nbAHZ1gh_~#{o08@ zJEkz_Z?5)yVDoMRo>j1AnVs*#SMD=%@?%L%7!9C()f2ToD9U-3WvN7gm^71tiUv;;DF{AhDBZVhadt@T^4;efiXboRd{#UD z6?0)U^0wY8lqk8Cm!_wAh;roH3Ls3hT5>aLK{VQN^voLkjch=J!xbT6?& z47D16)4VKOGf&g~utMC_U$(2IRq7eKkQA}3EqU@cjLpPF3DWCxxBB%gUTyeP&&3Nx zc1!xPUgQx_=KctxX1S`=kYq!>5#hkdMuJ;EV~?9;;^9d zpouC3i-mY2-<|V`28sno*_M`*I;3Za3#fionf5C|ji;DAj+$F38gnc>*=weng3+(9 z$IJg5yFL$3EKD59g1+|S^k5dg=+!IL$vrm-?{=y&5?;dM5@|Zgo-8tx_(SkP^)4x+ zF*qcpaPJIeXFxPzn{TD6y-{ViR8K!N<&@mp)o7c+VTu*nWZuBgm|S?`R%iT)r)HyN z<{iFXqavo9AsHs~6%%Sv4@egLNQztoI)*V5INhDtWQ1g%6*v>t;RyIw(kN=NDT>nw zD&hN(j@WM;rWmC}sk`~(8nl7mb$4QPSO2;L2WsSc4qiG6+TluK5io*%K*+Xqw=ZLM zfe+#xO!s!oJLU5~>N~p69DRisPxZ#xlyKeuDY!FH)2{~~6`+2i%aQDMf&G~6zDc@5 z24xA}$dh$S&xxB&i(3rFKOdg%4#nBA;{32YgnM^U(r13F-N<2zpTMPh;7pJ|^PTgH z8+LJfEHM*|GpwL$ilLF2v^5lA$oRJS$Fgyf^mj{55_t!)2xVQg5ZZC*dQZZi1cT&qhIDQKvPhU(uu3- z?XqKOAAAG9pQ%HZ|~k^Z-+4(~G&x zwRTxPKK5zXq>iHPm2q7e{v3_It}*?ezl=y|6J0W+!T@-;`@O*$xC$LfUWc~i4?FpM zxmv4M?=XI<+nuYIcO{DeOYWYM0>&Od7ezV9gQU16nEn&m& zneFfFuvHC;#$PQN8adO^=BQLETa&qbHL<-*oNB;3?R-0Ji@5SITDh=X|MWgnsevKO zc7ON2ncOVb@qOng7C_T15@AbQKy(sT5r9v44;P|!v0yvz^^q^{7|n2mTkGMGC&Q5Kd*Y~qGFC-po!u!bg8_0MejO(L2SP#a@}S{$_6{3XZJZNK<$LE< zHzkJSt|w@XRF{=%^8%eC&^vWl6m(v2fodD_XFU3^iT#76B?jo(!P4IN0Hr+tSTCQT z8_q8GVus}m(@sOLiESC0lX?$L=YdiIkbo!43jIc60tg{sWTBn%=6{V=Z$=12hS5Z} zuy%6M`@SqWvvC=*zWc4uq|5uy|Bjg6Rld60t{OZ=r4Vw-s;4NuBgBam^t5!zTl0ny(c` zy&3L3ur1Zu`)UfvX~*-kawcvb!Om!GxX&PoL7i{`1*aZ+g+4n-o9WhAhzyXi-z^*x 
z-+m7r5a_)5{$~xYUu^dJWs1 z@DAZZG+SsFyG%r?KJKdf%TK>-bo7lB>#zWKu@o4)Y7^8c-%3k(g#L1-Xa+s%NimQ| z(1Ps=As^*v(;i`ijx-IAk%!^jQ)pkGhAWce1T@iFIP-oCHc__Oyw>3P&c&c;Nht1P z7>KFrGE__MzSj573!2TSXY$7uhG6;AmX5TCl~3P}^U2h?GC5{l06Sff&rLG?xpool!CZOawirzM@=YYNDorSk zjo^q_wK9uH%TupwYLhe((;#$}?48iGBV_$-O#i3;sRXp0wm)+o7BFWAFTkVRiRC@p z?gxeyyQ+eFdU3=Lk0O9Tb^dI&4o(0cZ-*l^^1jAg&d)2Zw`d>RvA3lF@m(cG)sM+vwlmD? z-`z@nbPT7jOC8iD{iiVHTVn7DB+Osh|Gw&axxQ<1&%>ur&Ib>q2H89OeBv_DGvC`a z_LpBns^JG85HnR{m!zTeYrgP@~JVC1#c*Y+(;CVjQ)q5hCg>JjS>#z>)c4|ioE5uEcqTMNmycV*RI z;}yTRy4S2up1*g{8pix(9>zeR}*?p4M>uyg$2NkEIY|@V%A}{_dDLt-& zM`UhAT)T%Ci)}-B^tC{j6)(ZH1v;c69cEIkv)|PNWZCJ7wM7dt($KE>)pFGixDkE2 zO1k?Wle)iSB#)xCKqg1@hiH;8AS7e5e#o}>+wLifJI=4GoILX<7t)}Mnw84XCxq** ziwhJ+i>LgwyZ{U0UO4f{K!iabF`%hSx#gpswF*`qFi@e_uV0-Thi`GR*V5$3s0O;v zSfuo;{fBbnJ&kL0;tVPE$vK7;`Bj@C2k$Gx(@*-{+(u#9msn}rJUTT!S+}W|`ECU8 zNvt6AP?vzWJe$}^``7hKK8>9ETJ@Co=5lSt1L%nk3P6` zK3u!trMln8ohi9s`_K8TPhkHcXjUF6U!pi zK8X43^!*g(Ziib=LY&?czyub%-DN2=ABMF!8))RtI{HBW3=y~ASyXY?R~<@E+v z?T7o@W(~^n7^}xA`bxILCo&dlc?f|Guz;8F9}<@X+j8EHc^Vl88f6ANv(3+cocJw( zzG%P|8MMQ|e|kAlOo~6kVfs-P*nloY;6=LKBHdS`K)P91zDJt?3!%StM z=>TCESMIc-3*HFNl4|f3q4g82s5I*wDsOY=MC`en#)ygS{3-sd`!JOO=?_)z2rp3V zpCVLi*Pr_mt=+lT}v=!zJJRz!l-0l ziM&1?ticCY6bQ)!q}o{Dpx)yLL;*`7_jMKgt>o4X`77J1-q#!)yy2AYTZwXREl^W# zW;zcylO8Sr%^r;KX<`E%;B^t9ZSbk}hoTh!HUUS{4_)25Y+=Vr;oGh<+>&2`>%tB8 zokO&xb9@tMgH&oEv#zM^m#yV9DMajY2s0|tIlcBpFrXAwHWSZKLV`{6RKYO-FJfY+)S49s$oJrnpp+aEW;)h*Q06vgNo+>h9$y4+i!krblAZpm-#qQ7>hc?b@g8o+dnYKl$a9 zbcHQs%R*}rsm>Vl0kaK7q{Sksz2pcSYc;N{EM#i&#u|v5xqOP0i1?H&v`zdZVn3!% z43QsIk+iShW5KdR5h~w<8*OJSVPrXIS%TE;x$;Tk9szBWd zEM`c8WRlP&#P^26lC7BszMzO$VUI(A(f5xb|I(uoY`p@6%CCH;$?U}oE6W~%&GX^) zF}$#ag93TaUMwx~+wsp3d!?|T)k=crcNgB;n~?h} zm6%j~{5$WrP3lty|@`s zVi59v>HB=}Kx-WC62B+;sgH&^e}PA>Sg?ldY#2pHf9p8uD&*YzB(hqPAx;atU>(j9 zbf->#Ti40+e@_A~vOHdbPbXhj9j+7p-B&2yCpF_xsrYRXX@Lw4Nqpr1m?x<$j9*^+ zwZ=-%N@2;WjhOYZ1FBsR<27-3LeP!3}F6>a-eL_e(`EHrt>+v=?!bx zsW&L!8y!h{8AzGoj@)Hx8D#LfbNvFpd)`#gxzlyR?hb7d?}t*re8g90G>@YLNqc2i 
z1ZFGgkeT?|ji=}FwQr^m=BIA_2kZr&U+G)XN(W`SAH z3Zu>-q|t;S!?Bs3;<92f!@d8nn!dKH4L$8g6F=%E-=*r3KR1zo49#u|V+D;qt@r&I zc%loSptL^zJV_&+h~|zs!{$1rMzb+9IT0)bEVso}hpSpK?0`;fSD@)VSwlP47fE#G zE6(v)#3a0qzQF7r{sup0SdIPmqeV2;DIZ2WR}hW(iTHEMRoNVta?7^*AjOuIbRo6V z=Vq#Uwy!CoY~mo-v=i~Zv&9LPXAvyESjz>t%f4VEnyp#eW$|RfEzGX=(%?%J&8&Nw z)$JftC*nRybwjE%;5S}k9yy=(+)45?J~J9Dgj2qPd1pNK(|gPwQmeoq+4fU;%e?)` zM)<$lfC7V@ry3L>wK!i7?`=(9DF{a_3Yfz}1f9eA>KQHY+^c1Yh?4q|Li0tF6(Zuy zNaSXJw`EjLi#Pdfr+$qT$L{x<=^fS+>HA=Qt+v3!cN=yPoF-tcaT9Eyf=I$*jTL9bG)W4ntVv z#YU9^_eqm{PFhm?Z!;%!6- z)wJv9D7^&l`g-CqCC=?^GWa--Yb)ZgR{&Z^JbC&#v<0UWh3qiLUzIv_w^}$@@faCc zYMqs=SNs@AaYXSB7Bz2$qR~XS%QHr>$9VH~y0Y7_EKE&2@!at}CI)K@VuS^`KamLb zTWYq+Sb#hnRG;v8p*R2pZ8w;9A-9T;t++*M`C};DBiq}F%V`!y*l9|}K34YoVY?W^ zLTaZ^Agj}KZ5C&a zqdF1E^*NQ|`ZnvCoaM08lH^)&Rb$JYiV_g0;C^}f*s-auQ#xU3Faf|&>9^!`=9%N( z+aIw`%Jqv^l}SB8+avmAmwDKUb>&CWHB$D+8o|`BSo!;G9qRC1?BX|+gx9H)-^)l< zSzncPk0lnSuDL(3>c7$Gr;fj<+oU7%# zQ1Lf1&+=>2NkyS6wK^8i9!XL>mNpaP(hh--SpvrFX(nwPr|z%DK=zfLva`M!ilayv zOL$c=a{@So$2su# zM2nMzQ@z*AVlQq7%aO$9d5IiLGvh%A-PoQ=W=VwS2PAWbRb%^yn994Z;X7tyYdIzZ@(EoN~ZtU9zuWUxem@uW~+!B zIU73DZ))2?3Evu9AY0~n zX9CT9S#PU_Uym%BO`I5B%h2Jj{pXtxIU_K5f`aWPug-szNf@UBd;)yfs?6v@{8EOE za8NjmNWP)^;!BCo{;nbo6wKq)j4Tl;8y>Y#^$*N0t_E?#A+Gx29cF1GHM35r-3`Qyws*+!(NRbp625_ zRkuW$pA0+D^{xoN8ac_=X#zYxMR#%BBqi4>-fwrObR!M*-uE`fVM`6UTL2gC5%{S- zNH;PMy)UQ9|M0=CT$3wQ&_$Mcn!a4-(mWIk+KAhRY3)}=zeTQ04$~73rLFPa!zZvz z?qhb^`}_6%mBNGK7i^Y4+8z4HmZ9g+cE5cduynjhv659oQA^_~P&i9=!f=0wE%|V} zgKBEOWsBVl(QH2+Xm$FS(Z|%ac|I8O9jY`ACcm%knCCS=I(L`?H4WdDwE|lb8|4yb zkX2q>L%#|mY2$-mYA>+R!iozn)osdNt}EV9X7YJy-W zT1N7$6^pDo>A3K8@p=ot`4w#0!zPyIU@^Nek3BbI8I$X!Oo2H-s0knky(Q4uVGDT0 z!S15KZ}+BEltBvknoQhTOzsC;ZCAkz(T@;i*kMuczq3vfAuvEZoWyawLXt~;8hhiW zzjzZg0n~|}Cj%qkgh!a(JXjm=o}?VVu22mf(^@O4Eh=k~iN;}J-~1Fdj-z;kr$*6zuQB#Pl5)avn44MmZ_1q=?;&so(Cs1+9k;(8xp8o^&nM)r_7)QKcsG9^Yr5-^vvklI;afOx)7DIN~VS zZk~k6u_!8WVyJa}cg|P#*I27Q+-~5X@HJdt{kHu?=rDmZ0)$P}^*P)BGn%?udm=cG zB-s2|Nz5cE#^sXtZRIFT~j5b}}Qk_x$F@W6ishsdYr 
zNa|5lA?sv+^T`kXQ0jombpNf>*_ZL`HYJ|XC|a(hFJuRi+YhmdH4A<4rsu&;uuUwQ$K1d-~K6z zvKD{&R?GMFN>y0a^fO@e3bVW-umHj|u}uFzg|+Rx`8~SfI2Cvq*n>|LhprSj3%F3c z6*x|H(IaLaC%X{0;hd%21+CQ?q^v3H)(6rPrjnJmo@gtl!n>Ra9;!>=Ys#^ZPv z@<it!#X++?h z!8!fXR*zx!=sd?yD4PU0&lmf@fzcrw=K(rbs?{P^?grFo)T(du4x0-myw@P?9m3SA z{j|cZEx^k$_GHKHjCx_2-(uWjfLkX*@pkF)>rB z+#CXIKsZK!l1Q=M1d`!Dho%Mr-8yEHVR)kCpM6I7B5z=f}_OW}~8{){yYv;PRM) z$pjl*SYLqpJZc?eEFSuR_2Zi749$Tk*`ExUkVA8V$ai$2iFcT4$)@D#?c zHWkeb2j1;JhkkJCZeD}_9m3=tAbnG`lZGYI^d19XQl{X@t$d8Cy1i1SPc@3jnts^n zd+2~q(f!WW;L81mTe8Nw<1dxQQ&r`+GInKIgUQ?5-Vb0-ivbA<#&YQ@{v~!TW*x{K z#HJiyZN^jb)SoKO>*_Vs!u)EeK)J%+2$SdqO3mU_ds`4iL$mG{4{Vq>5!Y&4 zPY-;b;c4q2ke0KzVzh6M@c*M4KJ?RYzEyE~pLp{)F=2booM0TEw7dfQ!`2@C7Pj?T%rs+}`M?Y}p9W+Ux{Kf} z^#P*Z)*L%^fhtR3*PD%r6WtPzH`0-(f{h!vOVnU{Hm)dMF@Tlou*vxAEz>_b>!&3v zY`_~bl(fnXZ3B|@5_BUwwTgW*$HN@W4Yy`DwDkJufSlW)I43=i(cv09r!)BtMjSm*6J@{eU*Te}+agMWdL{w}fpG&qCe%moGq1WHMZEd%KuexaH-Ory|VqdW|E znh?xP7q}!o`%=Fas=NuTz9@rN_rdVjk*E%dvH(ljM1CxsDZ@?u_F_8LWct@_<|J<9 zV_60t$UDFsSCzD1Z0`p63N_I^tqZwjZao3O0+eA=!?Ks1S^A=!(43@ov~3h;LhPkc zqvN47*fIoRaDT@wa9_ZZ^A-t_G_I4mlI>NxA4&#@Oo}@1QysCZR zWcQ=pPcf(m!0!l~^nN^UcWfq)28{tg|F4%L+|IE@+%htmcn(_>$yH1_;;@WXF7EIv zQK`;#L_b(7pZ&}=f?qvdFM7q}`~s|&X_CY;#Iw?$07nU9)AM6ifQ#6@HxcpzZ-;}* z8d!;&$9|{IUA#trTp0jWe}cTo%D0f`L^Q@rfaSpgWak&ENu~kNE(Z_<21Ca%<`foO zOI`36Aen>vi5Rg54#v+kKh^u`mcSUfr2oWkctoWG$7$m*M8sJ%v>qu?IwOF!nWSJ< z(?^@cxHjl;MZe1PWuo|*ml*3xUuWjT6z=#MtvT+p_>Qf5Zgo4@m02q%5Xo0@Lm=^d z^TKqNt=d~J)lG57D-sSKXWlalm1mY{U%`H?zw`IxZj}?Js0^`Sf-bOWC+(fMz8O5x z{exH2K1*2J0hR5rcO&i3zxq$&|Pg=xXR?D6g$GTAcZHfwD*3LgF2A=m5f-oFe5 zx*B#$iZ#?lCDFdPL2)eQ z^SMb+?;V!ytODd6z*BbAH3Ydx>JMEnX&XKFm~ii4ZW{a2W8sZHWszIw7$94RFkPCd zuQ|NK{rfZ3M<~~P-11R4#LPK8Js%WTx7vC*MuNSw#C_~>`43!@7{ zIm43hsb#aux7QwhiEiTz6ST2-`b*-7EQ+73#hN*m=@bFLIN(6)1X)!J4S>oDO(PdA z_RSw}ky z53i5{a-(o{xv09M zE}y5&ekS9sahOGTgNVtJUslYB_Wu+{-U*}LyHPh^{BT9AY;35L>sJXj_<$}C^k?7% z{1qOK9bt4s)0kQj@MDXO4P;yWyT_4~GLK~saY)H}NKefB=RbwioU*_{TB2-U=~}$9 zi4`;~*pZo^B^RWDja(f0@cF` 
z5XV08zLWn!j6Y{q;8Q_36n4m^?LP(OvN98+xD5on_!LQBoV1R*jJkUy!))_oy!eF} z2OZ~!GA&u8p>pet-_kwL#Js=1eP8n@E+Hr)c<6w~{P5tHP)o;cYt^6mg`z-8=*@iki2{Sowh!zI zMXUJ%Ku3f&G)gr&%*x0Q@fshVc>Exw5a)w}ky4_DL$p;7rx27gZH&EPLGHhszbWo*VS7u`?EbYgUL`z)#5K^ARwSuDD7$?>@3 zrRz*`8A`GW!^K*5**+Tn`}KogTRk-{MpyUQ7u4$g{eEd>7LMNUd8`tEkGC_NF6+f4LM3Pvp@G=ZocLkcth3a{J2UpE? zQ>c>-i58y^$8Fn8E+wQtA9U&&MPh|^*-(2(pSzCIS?gKU&zDDir*5MgzH~YrYKz? z`T?|tU4LM~kB$6twM4N^lI1a@)Fj2E!^(xb+VvM!Zob^oY@5yzrF1`Nqq@|xN47okVZ^+v{0MVRR0|$+h{4FX$5_dy zMc-x_UYpM8(fyn17}kXafrd7TsE+>>WPjq0rMkbn_%UM_d{>M5GybmCxVDR!EInh|nJO?%IBb26(yji4 zU#GXD@b)=cn48?=--3tJL1krV5j^ow$;OQIIQ0_pd!O^Jt?I)a^|x#1t47^tS;vci z4*A*+&5`22eeyD=>PVU6ei_lS>b~fK;$4kRkEnkpXjSdJSFnZ_$5-@50*?GjO2`sU zDKLhbVm9wfJNB(6$eMR^RUq(9ZU$D6W0CJ3_C(a1~5g5;C!ES0WIw z5j6RC@NqeCUHT8-td<@K8HnEzMxLKRcg>nMQQPmu4Z$BokCukALe$pOZ&ca{x$)IL zsMrdIA8tF>t{E6_uPO?_hn_H1P02X+$mjV?X2<8{Y-0yG-LSpI$y9?ZHQ#2L@4e#& z>1_MyavLpNt3_}C+Fq$RRq5U+ON}0>UlzT47&=R*P z_N8I`2%0-31)zfd8?z}2);6}0zp%sT6Q_k{DXH6`nimQ`Y#B1|`Q0_5IuP;R$3;e& zIM{PypTPrSN(bSgjcNJeucjkr+V3?>FKNHNcLU_8rBs2K=8^CY-s^!;o2D9`;Sa0{ zRO|hk?6q8g!pYXM{qCjYkclHMC42wIj>Xf?L1AFos~F4h6v<*j>5_i8J%_b*M<$ov z-HU#aZg5@`izWI?-T(du3W9RqB%ygVf4x+olV!@?(W-3H0YgOS&VGPH09s=Vt6UbeK3#gQg}SZ z=LtkEFY0scTdfZMA;-Kd$emm}$Uu~eQ#7#=*DVcz*}|YgQ8rU)W_KIiX!{lso;w~X z5lX^!np_ObIT$E5!iodBnT;ZT^m+N{jnjeQI3&s@8io(m(D{61c6`wxnuSLpxgaS! 
zn$Vp2<>_R>3rRe-*c8`(MfuNl28mRZW5iFSx9n1cqT){ZoT_(jmzAAdkmgb)v-*EL zy?I=c=^Hg}-=>_Y)M<4|Wu{V_tjx?@QkljnD^oH{+!?bnH8po!;HfMvEtzs$a)F#u zG8Z!URLH$eDL32_0hJUL5JA?b-#zd9et!SvCxvIZ?sJ{%T<1s=64+k-@>$*DewK}* z$?AtV^#M}kKv{3xtj@$iuaxrpoxN_)jwkTGxOpr6&^XjXmNNG+`)q&C+I7xbGrI$u zdDQP}UbqbS2f=~vih!2L8Fwv#`%Vnx3;l`EozmKxw1v+FNiHp-Io`@5o!g!C zRjWf!Gvfzy-;U{yi2iY9w4CE`b2%zRk0R+lvUpg_cxUn>kTu+cNb9Gq>BE+57jkS~ zw7a!^$)R~kFI*gc529cGtPiVEt(hWijG}J)5R5wflG-Fj$?kSRbpRNxY-|7y+UuaV z`;No+uCr3jOcvEuzbG^dW8NvLbsu~^{lD^vv4hNSIjhiDj=`}cgQ6p5P`RHan0Z(l zwq`elrei{}p-dE}8T{*G5P-UG@oWid0a59(B)OK!cp@g&@ceW>uuoAaHQFvn(djL7$J z->gnadq&{zbONTf+oPvL7cE6FhHf;vw+amFSiS2L zN`}LX^8>>QFpz7K9C+95Tc+QGtzI{`W>+LXJDRZZ!B}D@uJK!fO?RS*YCP=YjCt+j z*TcmX-WNoE=IZW#F*ndiF?g3BVT+jr?$@`#8`Ec_pT6+EG31VrHXb|A_Vd2Kry$q# z4DYK;rJ=-QNLG6zl96GPs{U)o4MU&1AHF4R0t-FZ7Bj^#Q?xZ5<845@<0bH z_!RXl>p}D*5;Q38Ta(H5;^cK0E{M-J{_+^g*b_fZa;KNAXsKgEJ%v@Y=TgUZIZo2R z)`)kj8Hnk%ZD%az-&IMf>pXsGJ-ugHbLlWvEgKflriu`PKM|w+DXq53t)0l)+z!G< zr6&;l2WaF_stDV?G(awT*yH|huwH-dKz9BO3lL%FQFS5bSep3*ttY~a9>bpn?!T54 z2_uHfhkGs+XHR<>YiT8i2d7uTpYrAPJFI=J6u^@8esXokzGatD1YI|KfAhW%bAdu@ z?#Z0p+8?N8*FDU(rw@B(|F^gMro%^7zC#F726jk&EvYr>9RbzIHu7CN4Yt{~$>$#4 zC&@3*_9`-P$6RQG2l;D|UlB(gQZhRpVWP1<@J~Gow`5f9KtUU(2I?9ydNV!m|BE2J ztoT|qX8=9?LtK+V!Z@&M&KD5g67Ddy7IzIOcM3kXIT)l)GU307`uz)oG4J1gsm@ex zBDtFT88$uZio7HkbBx%W_WbAQfggkuoO+~7Zoib{bHA&_ z89@H0*HJ@;hz-obBFal2pX<4uhCA8T`Q+7KJ|zrqhhz6`~T=T2*qga^L(r1Y!`vof79 z{q(y?K{y1m#^?cM$!=0Q2VoF$NPlvUxFgN^bFK4Jtu>xMB_Dqvcfg^}K_&NBnhqR% zp1L8-Gk6kbaE=LYSJ8A2+$Z=PW6l??@4wzhLM z`6;9KP$T+oC+l&)^txlm$1O9AdHLyMW2dhrGJQYZ5Xt+r*5w3z%`wrqzOSqK+W^&U zYkFO!vObuXWQvbg?gfw}1m8nbV`XA~Ox`NUd6w|wceTHB(<|r?>NJzGC_Dev@_m%T zGze`Hp7>-7YR=_rlAL^(%BJsXCmUYXDvU~=>T=Yk@M{H>S1rel&tEH1^+k$%^f_|q zkqDE1r19YM{J_O;iMRwF0k7)9QjHw!dfY*g3ltg#=X3CNci}<#u<7>e=NHpq>M51! 
zE#Y$IJ;`WR>`->Dvcq4Y!l*`rOCEnoK-lo&NFkXnpf%UB{;sxrT=Z_y->Bl6-i`dw z#(&fL#u-gK=A${@Km!ymER32H_U^|{;UaH+7Wb*s$|!mKmY^9sq09C*1409XR@?&( z6qu6lm1TYqBvhI)(M_t*_Q>vwZ5O%&Uc$4ty$>9iSdQ3&O%HYX;~H{?0AdWH2jR6c zMmCSQKXuq~lkBEU%pKAW{XmXaTPKOns?xCDgBtiVAH zthq%@SzzN;!l0^FKxK{~;&X&Yf`{d`=KdYX0z0hPDaFMpKZCNx2eL6d@N^DTALj;}{9{w=iA&JkmaOC1s(og9(Rt6;8wy@CqRU>TVrfyP!zRW#CFGM{#D7r#d)gXm1Y_X^fqY&G|b-N*T4O zC-4?YB){~MmfD0n<2f&yE%pHG6k^=LXZ+wRxYn@p2A@J}yJd3qb9@@l@IdO=sY^NG zv|NMlYVUQ*_QSi6&j9W22`fH9335(5Bv=D<$&-Sf>BqI=?i00cw#;nNEJE z;<$5}I9^+_RV99q*5KN-$*Lh*ieQ| zru%jea5t-S(1iP_8>B`-oBR#x;m<;&p)x&ghNYQIgQtBtCF|W=8|$?W-PL$CrL6${ z7JOxItYiC|sJCH4RoM$VVNoOS90xYezfZp|u(+w##<5(nq*fdBbJxa25>>W%++k!M z<%2cz;kPTbF9lFq<8@bb+n`xm*CjA)y6gW-mmgF2L6=`$Y7*-eHYwoQj@s}=WtVTm z;fGf;K9vQOU;5^CjdHjBEA+|gu^$G#Y;W;9qvTcBi%(?AUrB;U-W=n>|4d(Jbs?JR zMSTz*g%aI1ot?83g<-ehB?mRN>}ysWU}|=yG4(|Abu`~6O<^e#IODWQVxVI&HTZ*E`>q_;b6M+9z+W1?t2+sJQT*Ma=jfoOtg zg_}j<`p!&0a_L_y25)3(Bm;%tC^?oW4DS?Fz%QIvcOAeeYdBS$jkU_Ct?ToS-N9%|WnXaeeC za7ZF}`;D;`bI6Pfh&g6V)Mrdqa;dGggW5JozS$NVkHOKATM*|2Xq4xqFxIknV?x z69Y)l?8v#F)7x4yR*91w22Io>s(Q&b6M_^|FL6)EEFY3mc>YV8RaDl@1w!-t)k1vn zI504qrtT57G`$fkadNn|e7Nq%8f^;gzl2vYn#kKhjo@ZNzIf!+3H+@`!w+rFTadL~ z^+^dL!{X2c$@`W`t_X-nFyBlKoxNyCe=ZZaPoZo4zm4QaOo)}`1}k{ji3KBAVoUif z!kT>k2`>p{)&sAZ6*$>};6@PJ3pgEKI{78&g#Gt6C=mCQmc{drd?9)lSYujb5O1Q3 zeXlXO;EXX@;fi`6w?h`0WF(6Yng(hTy8LdRFwZM`3OKcDgi|)A1B(yX=g7rViZjV+ zDLhhj?puVemBY@)FUE64I~GQPS-i^Je{B`ic2H>*lMMU=l#bMfg32mVnRjNMy$Tq| zl&@^~$>ffZ{-1OL#JbuFN3O}A|9dLno`ORI4;-~)9Y_7X&90c`1yEr8!GRqR=^0Ul zDV3HAQA1Xv7Vo?N3TC&;K%q@pJ|lvE8W%E;rw7*tw;Zc1p(&fkze1fI1~-1E`y&sE zxnul=Vj<_`X<7q*M<`P)CR>dob0d7Cl{~qgpoJz9E0&OMDqCO@*A$FD+v2P>2PeS> ziLsRWXB=qfUs3aUg$lEG<~m&wl5BB>@o(dB@r_(&({f$S%P01($q2qGO{~J3ENi_` zZWH2*Oq4E19)>!D_>{71i6wcQ(&Jo#U7Te)-T?czv&KJ%V98X9gaC!Z?X;SW^o>&F z1p+8W>(gEaQO0U%%Sg?yO}z>JG=+_Fs}QNYYo}hE0?6c(bqZ|+mlht{nLD8UTL)*~ zRL%a}?Z?l>$v@PaJZn%`X2rFx4v!J*-Frrp%P*#ytVu5h;ox0}cPGiL)}(g+!>j$F zA1x!q5*c?lNyHG$K7F0jw|I3KRj~3_|0?YTqz$)W*0wn6zZ#oscyrbS_N(PpC@!ar 
zt5fmWpauhH4x_YN6*+tl0j1NFzwtx_{3y!u7qcTk47uJO@?D{P#$x79v!KmGp5Rf| zeH`ff*zlov+;yPmU<;Jq;FI`EANbxq{qCx0PxMX;4 zOJ-D5BoB5PU}Sl&K5qZ94Nd^4EJ)C|#)e~Wl@XIrtFs)#1BVgp0A_J zui6?mDYc-!#*K7a8)PDq1Y&b77=?JUJXBq0SaR{1&scw@4mlt&)%vwE`&QhAWF%?RLmMVU$MVs*tLnf%HAOt0c@j z?i|p_nDxCe(*7-abZ(#@C?M@x>`%KtLuW-DGp%%XZb>`;Wofp*3ZS-`dB!cG?PZww zWD#>U*+f-i7r;VR&rvPX6w$y12kvhGfH+Mmya|>qLZ|z4xc1ximZQ0PIy~Qv-n^ip+>lWs-J+=7@tJ+yrdcf@H8&CNrf&*O| z*T6WOjNg~`pN4W)dmCPmY^$Q+H(>ug<`vO$JmQL#Qt>sC6v3k-`>6F_!15~pw%&=t z!zAr%lWstE2Zq`e5{DF~4rM0J=Tsu$Y-Agc2X4TeM_{!G_0MIJin2nQ$IijJ6mFs7 z8>P*rHOMw$3A$ArH@Rcv`q}f%apxg)(y`Ft-U*c<`9VUCg}SrGlgm=y*bswi&kD0| z^lt?mC*}5L7WY<6XshLk0C7;wh?T!AZvSU)EBX;qyKd^B9?PSkM{BRe@O&At;IR`pRxtq6)# z=x6qS?7HTYj%n>f0cS!T$s$vk->}C~q*On7P}o zSa|@Mj?w_j8TF=@k{vnQiHrt{tj7*{-KO=AcmH2YFzDgcc|>c*dkIelk!JYRt((sm zg+#=@3|?wcLA!I0v<-B!5|A@OIe>~kL`z>JZxqryiF!?Mmai*)p6|T86k$whBWR4>>O|MRm)gX;i)N7wfSh5fU^Yc{NM%`^4lt{MfbnUIR()xuTr0IPo4&+G(jYfWz#aF;U{C3; zPTzvkkCgp`_yl9fc=Hv`t>xU#-ou_8#dQqtFL|_F&PCo_rHo6pY>RA{iu7pwvThZL z6Xv1P>VNg9V-~A-p2ROQ2*V{}gcG@E1?zIkN{-KnLooq#j#5g|6GH992A!Rb;g^yA zOc)%u=rap;xPivjN-AmF6|c&0 z|Dd~J)X;{cuS{w~D^(GaIsdB7gZ%#dWP9q)KXVo5J$OtX$!7tip*+}YV+uiALUgmHP`AyoL1Z0%>lyS=@HV6j~Zm>=M zd`c@z5^fv&POPs8a-d<33!Sz&TDA8*wAmMr@j88sPrkr^Im2{+m#Gdkj1{I~FJ+LA zWJ0;zBE=!ceyvG21DM9Y2n11%k?uL@Pj(rQmd@Ja?B5g;c)YHp&%-hA>TDX*3P~6+ zqoOcqZk|;0;Q-ol072zKTWw7e*zibmn6dy1<-BL%p8frGEXfat3qrwW^<#pbJerP?c})QD$~y^Dc7#td2C_>B*RT` ztk|q0Vss~mvAEH)S-DG!;Y8?r`+|*o<7edH;+SLSjy=S9b>EEaVLP4SJtKUoe)7=H zVAD}kghx$WM*xU}y+C9nH(vQItTKO~m7dqfeRN~#A{v+}_?GW&n+i+zx3FvvIF#RP zByRD^+=|;l&2_l`d4?#T`00?b8-wP{WfpSMM_vo4t&|XtocyB&(YW}?SV?G@6`(=R z4w@l>fhj&)KE%?J?uuXRsn~abe^Y77I!D5DJcUKIUvY88-AO?JZ#LY7rV75R{Y<;; za+uMDM}qYgK=5q5J$6M>bR%4eeEKJL_bJWwu@<7soWjjO$_f8EWliazYWG@?n@_a> zRQk_fk=9_;X>--s`npBeC5JM7SJN$v`o^0tT_l=rp;FSQbIl~e$2oP@gUA>lmdM1S z?b#c?t1UITqDcWjwbC`(iZ8#{6@V{iI5(SI{`#l`{>c0x1~bZeTi-z3cV4PCL+jgu`^P+RFgY$DqqjQYWpDN9 zovg0+b!qhmdtQy4HP)p`&Khra@a#VNwdI8H@IS zBesT=Ba^75(ykt;hf>U;^tY;`89Dz8+hXmRq^VsLKbJvW^ 
z7Se>XJTZyLEDCFHIst_(jH$XUM|oGSuJ8u@Vtss78}CRHOLuP;M_g^q-0g$$Ip*`{ zIJ0VOUSeaWA8O)0xPSH@XpveOE9%w%ZRz?q9DIF6Pzzo!pRkam)wKf+$AP~GH8M$B zk~th~oL2C!Ih<68elg)HgPew zp{gh@=$mf0V;XsX{)6A}v#F-Ql;UiH-4*NsYL^vjMGVtaf&Pya2b9MJlr)D!j%FFx z7vY{rfXLAP%+H7!F=bnGKP95#YD+(>v9(~`!&-GU%_gKPnQU0p64(w~eOan=aXvX{ zeAHIlN*|?y?#5h8w3s4e$emZNaeOV4Z)e-tn*}^%eF71eIkbzNUrp|I*7gUFvLlwi z1$X)zOe?{)lenDBT?%DIditV=!oGraKD~YTc}p5zaH2C zhwkQp9n6T%48N_0ze%ie6h16EvRjq4zzx2sT^iVA{Bose-)T4o)%Oj z8%s+eb}SisZzYt19mJHjRUkJ4Jn<{UJt1L7Vv-*GCwwT`j6u~%J}Yc!XKwZzndD8X zGbk$t=Q%EKL`M|wPuhHu_8meQ)MSDnUDCQjw?6yQU7AA?@$yyxJZ~+3SKDtr-kv<( zo}cGizE%8E7NPtgLdn6K^@<`-XQ*C?tpATw>ZJmG$7Q7xk}d>;z}PT%GJVD2k0Uc5 z>E={)4cPGlD#fc2TrnUgSGe}ULwm9yaU+Z=o`$!(Wo7f_{PD5}FS-$$xpUu=dhYT8 znR%qy0f_@l;{@1G(_7fhmqOk`QX1pCT1!5CnJ$w|0b9}0XP~J3zr=XTrgHfUEIYagTMQnoQq>Zm4kFgGbwA zm9{`__fB;UC3aMCRH3g3Cq#jBFr>mvd)UvCBwps@$`z&cH(cmr;^Bv7(f~wXJ^oBlbxh%RHUb=O#tEDPpt;9%d<^@f1w3Q zPcA3hlN8$YR{R&~NBB?L7HGr_P(2h8LzRLIc^D|MQleTIiEo^i;il`VSNVxu(o*OFY;Mg5x2O$y+a2ynNf?|GE=-G&pC;m7Tj z)!q{1dJa`b-mrxg{o7E?p;)1{uQ!mm`&@dJSAkJ|010vdkUMbo7iWQ?H>AoNEYtbh zII;mqotbusK}g>8f@rocDQ~yKwcAill#PD;BFT+N$`0-{pOHU!0IOTeIooA3dSA-C zDfRpY)h9Nck3R3vmuqqb{1x5`h&x;xvg=$)g7TF;Z4FVq>|NQ^7+PKTZIAWBKHb@4 z8HQLLes|;|Y*=KoNZoW{=&rFZXBOv{zl4uDO+c?bs)d>g=d6>>K9KZ} zm|Jj0tUX1Z@D}uM{so_>1;-TV2?z9R7`zNKcE7SJt=jO2fflQ0j?<$fJ~$Fgvu|9{ z-vnTYZ6g~=n!>^uf)p+0|0rCie-eH3JxojQ@c=qZaRnNl2i&gNP=wfBStvx_JJjOV zZPSO#nZf#&t7mpWuT^LbaZoxCMwcG=uJ+mg;CmHo1}~z`f$PJ=CCvSCVpakth@U#xci%Q__Iju9h@eo}H&fAxUd_q!-%tK=ZOg6L;cj^KvGl z%)7)c{I4Qy$Dd2?bet$>_4|PhXEI`$+CxxQR>GKh$Euyc3^VBt@slKQVSRS5){Vpd(eq+ z!geoUv|$VHIoLvYuMu2dVA`bPxcAEpSh9Y-GM2U@}@?0L1uJAcKj&HS2n zJ#ZXRlniZQziK2^g7?I`ee(9OqVg!CejM+C%)o8kA zd(iabe~ioIYP-q}`s7xP0OOG_vq^ZGDx@cUu7nSsC6MJb7d7d=6|}F<1jvVwrEpb! 
zMkWqv{4j!va00HQV17;$Z5O-CA~x>(C(vVkZh?aOU5yMNbR|e!*!%%9>l6X58Xbene`|3auQ zy6B%d)06eIhX%|!;0Dgcxw%y;0N51OUi`B;l?y)fiCgDiyi2dJB+h4$ZMrS)q+6LG z7V@i`Wc-rVz)|h}e`0qdCzFUCIr<&v<%Mk5=$D1p`$e>g+TRAP48x&05^_2z5w1(D zjxbg+G5#saPk*t9=`D`NInuVd z%iwKyKlynD+F1VEv>~|?a#d+4t=>j5Gg{2<_XHRk+gTDnJ`je2Ny^=XZT`FW4U~J; zE|=|X-FY`ZM51y`DSOY=`Kdg~R>#-FEbRKL-h@W7xJWO4vPYzACd4E90kRu*1o!6K zk*TL;_H8Qb^894u(07->X(2$sv{F;mXjCmGo5?CxAgZ%{PaH(alKYiURNJ7oH?TqT z5Hf_4j^CrElWZn%g-0?+gsAkG(dBV*8~a=#nJAjq$1Un>#CG)(mHmLRl9?h zU}asM#vjV6t5jKa>D;r_A#u%Z?LT}UJ3T#s`J}Dw++c1JbfY^*C7WALTCxRSfFPHM_~0=lGD_FUJ|pK~&jPRT zk+PC@6RUow#hg`Ga|gTIsQq%n(TkeCN3}OE zKMpZ}@T+L(a<$>K!3&dr{TO1=*QI^2NYwmq|E&BU7yH+MqhqN|=dYOIs(|z)SVwVR z0-)Fb8piO~r}2R@NGv3T@1u`_f5X5ZY7kH509?2cj+(#QJpqhx-gkvA1b@UK&EAOs zI5gjOWr*pAoYMAB-W2;oX3sKA<8IsacXR8hwE$uwx#?z++uRcR8{X5c;XKJog{&@X zKmJ0{dlcKI&xWoK<4JTZE5vzbNAk+4-X#e=ye^?=bk>bEV9>g*dLZ4qVt7T5)R2J$Z+n;c^Uv2ay?Rje#cevSm z@oqbS4`6L=r6V92<`AXs0*afkj@3d<$F|rQyj5K6LDIO^22FF!&q=CY^0%@0wEe}m z_f}4&PT+{cj6LGrx~d<0eQNX^ZUw1|f;a6rG!hva5kb6h(#ao3BZlfN!y6U%uJW{t zkqj!5?hPlf9gII&lXT-s9QXdqPwfaDkCQhY@cH6%nZ9D$LE7{hq47G=SA3(1vIYI$ z-T<^zr45EBd6fC-v9xe{r$X)U4KPQlmp2iKqnYbwWR;v?Y@7c0TBp}uhH%>*)$#p^+kHU4u-CrE-rXN z9g^YwAPU)=4;wILZ?Zy3%IGK3W>HA)$;F~m-`#7tc!T?7*~Oyz$L48nwk zO@}XeMvwmozU}l?TEDV!G2WYlL(Q_S$Y+JTLXL1A7qXhs@@EY1e&vA@5yb)=_;!H9 zf|H>$%QmwX&inh9gQk->M20Tg%g2>YcC zYWQ` zuujJZ51^DDdZ~o~2nGNB7V3O>@UX=LZS@}>Y~=~HHeX3Leasd;fGnQf?wGW`ei-A` z#kQ>9sbLUdy!x$flFL5wsk~ zpmC1Pji5bINo&SMyLBb+*S`|0k~&D5$-ilm*{3TOR+@DV9Jt#!6~j>+D@_cv-c6>4 zNOyMWAv2G>6B9H(a>p@=Q+_@Tz#?-EfeV}Ce%UK6V-v4rVTiYwfi7jmDY;{P?wPo@B}l7m>6ox^mi7nT?oW;;f^Kc4ZdWjJ*?Ivz6wQ~Q zVIDwB?Uc9dcQ>%UA>}V6!*R`imdi59#Fv(z|2Kb021p;!#*5>o;>uc(>z_o&;lJ*^ z?H~h|eTPx5GD*d;(I54@k1yb9FwIvU-J395_Ods)P&cy2jb1!Q;E8p%E9!Sydj;%k ze`pr4D954YeyGOwnTyrZ#5`~7+&}8BjXIwk&Y9=qEJd<^;3FY=;g4mn@)G>yx0sp4 z?`ooyi5&Q5X8JqxK5Y42kZjUHBp=aYpH3i+*zR%Ergh z;&1lq2Cb@489%+|Y{wnZ!a<7z^|qPE<{Pi0QM`BXgQ;*sPhYZG$1z4%{WEb+PQ$#i zp;@VE(!EhaIHFlN6)A(qYgmc8`mfiOmIffBiv%3Y 
zu$+;Oeb)6R?323=A9RXzh}<+cP=|{{KM4@+zqoIx3@@jt{vh@1#_iO-`BC-I_P(jn z{rVh_G3G25nNg>;VpE~u{?`bA{1`^3o-e~A`p3q!<+%Zd64nt@c3?UVaErFl$kL?9 zanald9&zvIuo#=-TrjP_4)(}sTuLkcuKvlx5AF7wIFBom-Ik`McA6Gr!$jmp#}R1e z%QEKPe&bh_Tir22gY4nrBX$t+$?BZHMU`K|d6$_# ztP#0@)dYgT4}#hoK#dlFQn&kLuJaP85X58wvD z;F`o;^p6DND>$;x$wr=-!emzOuHWZnnRh8ucg7Hf_*So?*eGooP50VVp;IL)=#+qy z=cJW3U5S>e_v;y^9Id+|8UKm;1%?hPW8v8TVBw_H$t~^tA(?}dQdy`lM9naCRCWbz z{@`lgIpbIsqD{I~18&FZA%`!~@J$#L6* z1*5)8-I4ffZm^uHS6<)Si zo5~DOPG8jMYftt^MV;c`x}7x5a8w+C-+71U?+eKg8H{h%aG<_eabO* z&g4$rZm*Y~b*bmUVR)tdNEPMZ`W^3`Hal*QNSb~tu$(34!BA1*ntV!5(doRuj zk$FrW!yR|1E2`p+b<&#Z3f|=rW%E!+1BeE2zh_N&ApN*GJ#EWXElc?l(+3#o%_Nq> zWvQt7#$z(0(#2NP*Vormeq|MIEx!3S;1JT&TX0fA@D7SqQkYe6Rj9{aJ*_?Vd{6%Oy94qe7jbiuzXBqf!REVKP)tU7-m(FVA(^a zCM+Q-6oV59g!JS?4ubA#Yw_E~d zFyrt~vxBJsxwQJX#=*d?3Gr}JJO3DcW#sB(CO>{oyf|J62>*lt&Wg~r_=ty9RL2hU zW?i7QuRmmOi|+Kcdy`$#^118BMo=?&3h0a&wT?OIJ?S`0Fj+)!3s>pOD?ITCQ}b{Y zg6v(YS5EPMNUfM`yej{$RufS!S`I6iG1XW^j=oAyYk=z$@#+glkco&M*+u8OTO;;} zk@vVSl)`sn$@Za!G|J-J%vp2o2+;s2_xnC(Ik#l-DOHJ@6W}}mgKXFmn|xvu&R64% zb-K9R5;$)>UEc z)$L#$@89hL!rJpCzV29B#fypqMU|l~E{l}VAF7A;T`c+0XyHH{zZ~~?g+X@AXsA$*hJpMCkSU-m=%iPSjad* z_n&9(Q*gZv3pIH2!@mz)-Oroc+><*Zv zJ+=l+JVay;zo|5B|J;QjLb)?>Mg#9%lb^ zsZ60~98NGBy$tkrjdG4$Ep(u^6|eJr`BE-m`5Y!dTCu-H^!`a2mymdX(&;N3;e!X9 zXfBnbdxXZ3?%Q1}_5uzTzD+RMoU01)6*=V84C4pr%6erZr!Y_s7owfRVv?zckovJJ zASfP9_x7M^R6uj?I1-PDyT7Um?Qt-J$D#EO8|QM}LlXHCVogL>)k88=>NlCVtRC2t zoHR?@*>S^av(P_UyeVy8_tlY!aFY{v!0z9oX_L{l^18SQbo}5lH)l{X_<~ht#sgNL zl*&-KQ`(V_UxWOI4;*Pdh61PFn?XxcQniXeN#;8A2r1kw$+W7~41XZhinIa#nP;`b zf6c)7!Wj3#fhB_SqI#8e_Yud!c==zVWkSz)wehVSt3_?&62qDb=VGien+F?BZu!zP z0?Tkh*p8BooBiLt|yuU=@ zMjc~b{uk5gP4>O3wuS0kK5ojfIMQ+S@o76_T{0}X@0k!lZAe@8J&C?oLg<9G;nrC2 z6f$tR9|)`NL?9)je9nZQ4%_#p6>xR0N&rML&vG_=RHk;NDR7hDeU2E!o58y7ZB*Y?PZZ_xuubA%EL z*c&398BB)(TZx+{d{n|~2gZwy`U+76y-g}CPb#*nPOoc!~v7UuWFc+Q;PQc+}(5$%1vCeB=C z%N58OI}XNv8(6*8Y#$Do4l`b9Pf$JCD=8bQ>fV&E9dQIfUr{B(+kg?MnQR3FrrPj{ 
z;vvKy!dI?R4%6V5J&hRv*7%UA!c=sOY-UV{%!T;mMAW(F9;`YwSxD2HtUJ3e!n#<{%5ptrfVihKnjt{pg z4<2+QG&z6tiy6gz{1e+)QU4GXisTh{z9-8X{+2PRX*8SFw@5Cj#Kow>q;I*>Tpmb5 z!L;JaCtW3^E5qyr%PVj-ENf_!yU?_$CHB8e&85Y$zsj z*}HcZs^YIGRY*;ly~ftD`-ynqa|N8JP8(79NV<(GK_Z#ohxos%o|xg87)1i)QW$&4 zoV=8@yC9SCVFjFEc5%wRj~PlK9qeP2Tz`Kpxo?d;GlkIr0DJtl$DrO4ZD7);l}ajr zCtU7qbdS33e@bL75lLA#Fg-ep*yr$HJHruhvlykJFTt-8^@w4|68HcAm&Tp%?0FKm z7)|d4EPt~d=fdsKjRp26bx;5E*WYWn21!XOs0ca>RZ|va0*;^5kHyzac5G|}j{i@+ zM}pDbUHt{E+UIoIdz)=^)_#680zS0W>KnJmjz8&S5CZ2dx}%(?vE3mL41a6=QtRT> zQT=|ue=Nh{WP+}ZF5P#Nq4QsIHr2Tcxe*I>yXwEa{G`3hNjI0{(2z5W_2z5bZ>vBl z4jPS;(>f{T-Uko&OBm7Rw;J2YX;12d?|1fNACN2oGgZrq2%XKl0GEGP`|FKWr4Uo5 zA_gOB8x|BVeVx{ky9n!iosa1Qup9DJcelEZ4ZYr>X^}s+zJ^s4Wm(`f9MkKhZp3=y zQ#>CPb=Zdc3lQi|B{???i+$Q6O6kd~AD}Olm2Fn;sx@;YHzjYSrj7YJcac9uNXysi zhJI>ef87!0x;FMoX&d$nuRiexU3pd8>zhV%ECoV6yqXa5^<>#H_c`xtecFGF=H z9h`g&(lfO-G?IV11Ext{Il#p3V#j{ho!Y6vm$pEfy5m=>(&m&%P67MP(2bGsAJ|DM zFLu_uYR|J@aOh2Ao#a&PbrXeq7!JLm+IOV&-|<9{;pK1q<l}z%Lws@ww-Gm-`Xck=c`^wC0y5iS5T-;h~(>4G;uXfIMqQ(hBs?$+c#PN`+`{ z@`W?2nNaJzf%9RGP7cQr)Ek~IVhr-6&j`H-Ht3Mko*kbYKFxgQQS+v`{$nU90{!5S zXF6%&qMud(Md4Dn%vg~jwqes)*N2a~pDuw{xGDql%sdl{iEwVbHJK4v5~>|vGxE;l z4z7HG3v|#g`11HJJjMST@wAV}ZLh0&tUuKZwH)8^289CxV`{C`1aLS+X_^r8LCEYJ ze@E3)BL7yEe;TY5v2iCmbbUQhwZUgj^(Zf_xwm_r(|Q_OK%@a$$F}NXhir$Leb={1 z(@nI|S7(++3P0+|ZseWaH!Ll5-SyLaeOe`Xd%?s!{TVy zHELz{jLx6JgwlPB(3=2nwCnkY{m6nv^xORDhW3Hq2O`8S&gUXleEg0MPBOoWz><4r zYx9E?9_9wz2MJDtBXpxA2WL~uPtyk6M2pF>p!5HIeiu}`>TW_j3o{CKEq36|gwAZHmk|FHnQ$X9>vnq`g_l-`+zL#<$xno^{`_~n8=Pmj#)j#}9i=BrA=e1I)KMy?pjNhR= zNYy)W8<&BqZ)|woUvZ$ZM|r!p)GmH^gm3a!I>(-SZvo(OA1m6xI7MnmBNXsRn`_7Y zgIZIWfP;ati*m+-%s&YI8)2w1(DXxN6rdlU`tItwng@#AXOH0fo`tTz(GDCls#YpG z9Sr|!Y~Ja7REW-KEbmMX(^^H7uG06tyYX^vc_u;gqS*H!dYc536vR6TR3+=orRCO7 zV)$CuhL4|AGbvqvSZ%{ebwuuQe@6;_*xx}~#=_{1N?**SDUc_v-awCjvb{-z`*%}a z*R*lj>pu6xu ztmNTYFM@1CqM;_F4A1Az*LTyhe6zP~xRc;5z)S`kT5F zi}GX~LESr~bQh+jiOm+6(%g~-@P&a()soVv8U>yoyTvi8xtcD6+l@N 
z%Rw3@|H!gCXq&pz;X}=*Rjt%c_ABh!%KaYNTkm($UY3Zn;g6$aadsGC9+=~^Q48F1 z>}}o@OyQ)kfZ!*$Ub15tHcH?0?e)CP0abPufSaEL)|||frY!!Lj|;;AG4E}dm5_Ah z#d}DDQS>5s^?%FV+`rdMacTNoMk}<-4ITq>%qoh*=)r1SBsQGde~>x^E{i6ZutdCF z-L9YsD3VuAQlS7dm8LYZB83Yd;B{!PhYku(Hx@Vznb^GNJhh7l`v*`}a@TSwOs56-zDz7 zYG@=a%499r+j&^5OZy;lH35cN!KVWbjazi47V-iXXV;hkK-=G0e59Ej`!rKU|1&b^B z*Vcz-gY5+guRD@2jee`sl}Jw$D|)rs@3V*hd5C;xr2~5|_TZyQc+@gx^Nq9BI{HSz zpJPJN-A)SgSnTu1uLmgHucUeMHfiLhIImoG(CKs72Xu|}5BUuWj|1i6rNxLn(M-WP z+7@X?JEl6PoJ~^)UX6n8>Ih*ZI(_N-aimn>r{jI#( z53w%Iz4F;AQ5kl4nYiltS7+TEnY(H2cIy$HnEZ>1Of^e35zap1_(bT268EIJb`i^1 z^c^8%#HM1N?fNQ+{r?g5?(t0b|NpqIx>7k+5*1ddR75%CY*&|esgxDvoQ04xA#>PP z2{}Y16xu2wIgA_=R*uPGX_!OIwixEHna%9*x_-|-zdydWTen;P7~5;F*W>Yc-XHhF zALPJme)|3!@6ztWy+W)sUatv}@zVkh3Y#lbA`W3&4MNXw{#xS^HbVo{ z4Uml6xIc)Zy_qY*FS~ z5!l(|$OVBnHwyZUKlYMiX_eWNsn>V1Mhp7`m`MEjTcM4RwE{cy3uMVzOF2MpQ4^ST zU{1H(^MfMXm3~BC4qvz%lUQVMzl5b}Z7+HH=0B+*rK_cajN5x^`lb5N$s7&7zhIgj zJn07=6&xA=dUBlcD!dCXt7okOKx24%ta^j$-40Yv20SEWJeJOmHN&S+;0mjG&qa!J zMETW4UD|Zg2$tFIZ@QVZUB2lrM9HCub^nkg+#$}{<#F?~^w?LvR~k&XJT;qZgN8D_ zPeNsXXQqG2Wc9@wHl1kovEz>x!RL9mwOr+Vi~FL$wsOVXT>0@xTJ~eN(-*QNvEy{& zWHmxy1JJVw;jpeUR1Rmh!ibUx zVp_{KWz|W4tMp+eXDJlBjWdpTa5z1^imX}-KQ%V1VuIH-&ujAYjUh?_UE_TR{?R>%@Od<(6@z|ZSbAuJ3hF! 
zfx??z1uAd(MXE4rrg1`@ZBgP^hUw1~D{!hzDJd<|uYWcf!pA-uW+aWZU_Z(eer%-mg7RUJuUK<*F;^_d zrY1&A=$yw^&j3DRSQkHtvM$DcBQ!#Pi|>3EpuvPCxMYajCJ5wy&X&Hj^ygpqU za1|;ASm-{MorQV9Omn1RP6*CCWT>l_X8&8UyG4IvYU|7=dCgr*m7gksgBmUw$9q$O4XQwFfX`*M*tWLOlEkR@{z9e}G8FolAH2S7O zcDlJr@)jIp`qsv900*qOU;2~=HE;Rtg)@~XG7zJ#9gRm4Bma|<4mgdo{5f=AfyR`kf8F z-xp;%np+e4PJ949BA^=Aw`h6^_5Y?PZKQp;saN9Zoi#As=5eRj;T9pj;~fqrpB<2h z$}c|+V9isgCY`#A7bdBC@6I{zV&0betDs?aGh%&JQWm=b@9PinGzby;Lb+*Sw+%w^zz{5K`(W+bW6Ifq5jVutirU0oX|% zYjV&L-k>V|$pEuwiUYsvLq}*X&OT?8+pc_d=s4z6FC>Ers@rudXh4-O0&O)Fd1>wk z!Vi6NcwDZ7c{}RbcS`-zA?6#Yr-G+l>uQfmtA0bwaRPYeV6&#-$UI@!;wy$tWS%Yi zRoO{^n}z!CeSok1718}|WBTZ<0?jQkOXM2oOfKeD6b?kD2Ks*gOl}$wix}y@rhJG+ z0)_V8boj`KaImGoS`FO(fITULEyZ#jMY=;_NbWZ50Y{-R>T&v);r1@MtSe>DP;NR} zhILvy%ZJd8AX?KFztle-4J%ps9~}NAOiWbZ5ITqgEs9u4`5#GA%U&BcgO*t0{Q*P9 z?lfq&f_9|FB3Fq52MD&zASp$vPwr$xr`Gq@iY|efz;zWnT1?Y?WbVq)-RPGoXyw`r zMi7#64HFCRmkEgYh4@lGlRZBgVC?MevG+_muv@WvF*;%^0f(d23EK_4j-Rb{xSMf0 zKSbV$waZrr1*e^GUX@|Ro{>`7FY6nhi5y)TXF!Ls5Y{Wv1yBWa;5?=N-(z7!px4R)>mX0-{e9$_Xvnu>|Q zzX`eM5|98kd5%t{v?kV#;adC-)?Q+=;o?P+E2Zho2miel%@y;G*uX)<$5u|=!-kh{ zLYf4tMRIO*TtPYj;!T+1RhG+jTe{9mA=VvFv3%%QvbCgV4OhQhPcL7)d*QGIe;m`F zab#~!Wx+owvjz^K)YmF-g1f@>f~jXCAqlsWgGdu!E|^u{Ik05d9NTVMgfH}q32Fcl zDNTchn6@Gdg-f@d-I2dzV7BpYwR2DACZ_tRiRHw%-!Umr0etEK*ex182ofWW7GpWj z$Rg98e*z?mQ!{z*{XExfn>iS3eph<>8EI%oKW-NTI(MgC$x?NbAkMn|lds(LP=FV( zH8MrLpjD3K`yBJA{F!|iUh1)2AZ*K8^d(NVcqMcFKUrQ)?f0Xda#~9QX|WZm`&a@s>Re85?qaMlLz%vhXe)9^nWfC{d>zxUKY-nm+-cJelMwu~~-+ z#`k&H5sx(b2c&y+IoIv9mK%t%A8FEQ2>C!wPn&3^aVt5a1(BXMfUnUGh04~ohBU2e4 zE%yOoM&+l;z1h2yM2@0kBs<(eOWGZH16<+v(dc_p8}1paz^@x>r$ zi8rTp&eMa5VWctMFj(v1;!-e{D1HDV;u;@=`k$$f&M@~A0(bOVUt-{f?qD^OQC~LZ3&Y6uaWM9-=qr0HV*fB8 z04rps0r99s4@i=L=TyL?F6YK#OAZY$f4edLQ#r_ThTp3GSou>gQZe1ki(n^xM}NI8 zU{mP_mE#R%XYVdPbfLG@r`~nCbi9>jS)bCPrBql({a@Z)iPMjdO|S)o-0VZe zuhQ+*k^L3&0pf1RGVY%SK^3Qfj%YXWHM-1zDYINk-KOI2?7h+|GKLPf-HMc^93;O(D$|>!dMpp@#A~@W>IT z$mHs%UdH2kkwNG%l=4k)Z?rn{Vj7UP1dJ=CwuqcPFfP=-2|rJlpFd|#Q(*p>r$%Zk 
zG4e0P&FE5CEE=xB>ANsPH@{DP&mzNnh_q!S#V*6p5-~?D=KV%ns@M1p7rH26?-O_u zHxcYBye5d66ekl&bAH=^rbsyx@Tq3lxES8*rYI);uKMG+Zu93D z7y`h?hQQOxxAanu%U|FfY5vxg*R8=_x5*xJpT-+aU`dS2Z=VIcD1UN6^C;q*bT~DW zx3;=`@zfpvQ!Z5(-}zaMIevJ5)p2N5#20mWNlSA&yyj!_?>?TE?FRTd@637TFHo6n z1NL2|6g2`j#t!uu`ByQS6mk)^P3l?Kp-djZOD1Q&I2p@dTm~LER#W=@dJhX6r88O7 z-K*|*k$Ko;Px`!T4)?IMNq?*=MiY-Lv{p^HC717;4%fxHzgqFL)6o63I!6&ghJ=uy z>276bAJO?K-#Ux1Sp~wy)#JAL2WbL~F4l^EtZm1m_E^6m2{#H=OyJE;ml!s=&A7u= zqr_VPd>u2GglK9^cP?KndU@02v)4DZ`{uG!JQ(m<-z`Z86vSNYUZTHis;;8HgAq&~ zqj*$GFz)uiOHz7F13HY`O}+phm&V2DO0D*x14MywW&$I&x`=Kpb5<*x`X%1R4ai*=*?UkEcc?=~%otL&??XN~oD*IGcrQIrh3&hg936WXPTCt)oFh?k4)bpXF2 z{06w_-|G2W3DkZPYZ;GNiH-b_vwQd5{*!P9l|9#mA(n_umx@lDpy>f0U4~k}34c|H z&9JDre5^iKD`Y)noUCScR}E7<*-*(Xv1>Mtb-g<3JnF)anqMWCtiDJw2yK9u4Ht@* zKY-Nsx9Lf3QThRnm6Dvb)1g@KS#0kF$aI8RMTSLt|LzMj{3P^(OX7TIjp-$RG5Icr z`KK`vKS&_giA0OXMZ+1hn2mcQhY8a*sJL;cUOzqBLPFG8E&b$OTjl)xn31)7d1A`t z)6WUoRslCMgGR^ea%1+m5r<{vHy$c?l;QQ>)smD#_d}a2iEM>K^~BG_Pt^nTN8npZ zDuR=W8KjsJ`owWH_UOdrnF+w$rFf9$F)!wp$#cuQu&qsOmBHfbNB%KCzU@sQict|H ziw%r45yo)ka6eH}8yg+1DKuYGMuI_r$8pcLN-xhH67u)Sq

    k{&2-HEqWE=gClV$ zyDpgYzp;y5eVk!eM8a)g73TA)-!XT_32-$rrC_#;1h(ee4 z{%GY}41z{J+B9-*wJ6S&^CZb3+ssctJWN`ncvQ+f!~IOBwe5uQjM$pcQQ*8ja2Wz% zrw%kK(I>G|I6+p|aS)?FJA|rmNxsVh9s5iU&9{FliQQv!y2B3hJp~o}SwupdxK93D z_u0LRA-+IYx|ET`anrrmfvx*vRqrTyQ1QKg?y1eG=Qaji0Lov-SQQ7iyrd34yK{tD!8=~+yd&CaGeV-o~X_WF+@)96!$>hbU zd4kawDKjURue{&8#cr2k=RYz~0#U|N1*I{S6kju;Y4!2vDqLO=aM{)3`1j%Qg_yff zJt!M9yWBS$cU$zK;0qj)d7PO9kJ*QcrFFF_$d3xFzhZXP{55nFHW!6!#1TrTuz>+V zzn&qCrB%8TAu_Q>mSO{p3AWz2%n!!a7;ZtB$1vdTEiVWwv1!m#u^*kO#8HPIDwJf= zmaJ2to^PQId1bbKe`E*EVAauNSY=W`kxBRy45mATsfl^1xz$%SIu9_G)H_V~_zq)g zar9io{0P|b)&kaaR*(x>CZT1_xy2`%xp+{I5tp#>Rb>RslXn4I4Gor0Xz5J&vbIu=Tx?d%|I}af#!$%5v&FASb?e zoW4~%+{LfcXiMMP(||lcQ@yTwJx={^SM9cQaA&~uzN}mB$vLKVdg9ls@_prj8Rb0E zSj(JrPQd#cz1X>jEIcdSf~ z1erjb#+liRE|veTl*Kk$yDNA~yW@tI95|V*mzK(M=1DThJy<@W+{jxJ14BF!9Qg9W z#6IA7>%uF}w&RH5ZUd0mT@Z;Ki*mw^-Tp}z!;m%TCu{2WKvU3~#y%qM1o2l7CcWOE zm^XgHYGAipGzl8OES5}8`+B0J8f$zEu6qY=*~FOZa+x-qh;d4^JGx8VWPe)tJY4sC zb093unE2u6E7{GvlPU+W)n;JCHYAM-9Pb+=4|m#?Kfkr-atu{bI0KS@QvwJ>RiW)& zGtV%0f2_mXEl&uT-sNYDexV4RcFjD>=xfpGPbC|}2J>$M)(@)>Ga@)gx;kLF38iVv zamzunGta#BWBVUmVJ=2tLvC(0hrPgknm6-R+1dUrc~yMN>#7on-->CbL2tCWWc@JR;#zyq|TPZYsmPMmk@yRfl7If zJU6YO`dE5~@|<-3kKTaL5QUq1<^@MPy%C})gZ61-&HrB~Tif4*E) zGZ0tGP&jgQCQqCJHfcvu@y>}_$wV|sz=MkCkT!AuH0C<>o{I6hkHaSu@^~3r`g(t^ zHUJPDPGGW-K{-=*UeM+|eQicy_W^&P$V`cM8{5O}j=UVR4{7S>17V1^RQF%Fp_S#w z=B|cA?LRtBqX0LK$NTL#2x+qePj-ob|IVdw2_g>s&TR7wv{-n}~;Uoh(JbcS5ZB6J~hX_ad=6DaG5^Ilo z8#K327P36@Ph;Qq0Yp08!diW3bp29l5<B%wNfF<(`7ZAnksJZ`jDe1_a~mVqL~ zg+&`qmsd|rAa380y~o_GVbO9G*MvKU(TKY-Y>tjpPKbjQlL~JKK#%=`wq-`4&UA0O z7cPkE!xTK2(#9x`PS?yqYdaF1(#WUV=Ms8OW>_b*9wDQ$2~h#g&b1%lvc2Xu-N&y$ zBjqXl)!UvpWvy?a#6{;Kzg&+K5HElh+HfOBbbXQORUml7$+P1urv$dZ1a@Nq62m3S z-qIkvAkTl#M$LMSBG(3<6qFA2Qsrtg68TT&F^8(_n>p#P?@$nAYaZEP*?n|B+CrfR zTb=QBA2#V?+y%PIfxM2!`}_1sd^nG}2UsT%8df=v#Ei5^I1`+ZT#5c-M*HN!9=|C^ z>Ckfax+P4WkVXu3>!_;L9{>TD51#d#NT1LKt-GN8_tz^ zGL7W|fG_|=b!yEM7mkcz2#HS1~<7abxE5$t!6Hy(lNn8g!CqChK8S7Zqd^lkW3&=b0q;`8Dei(eu 
zx+LfN;gEv>)g|z#q|DVdnALD4!ThR4`2zVeRm^9szV{-Eu4P&Uj2H93v;qD5>4w{+ z&7pO=4q!V%^;OGURGh58r-_yw&-6pA>RH&YSG6rAl9J89A(ud4@TOu0jg(D;9~VqI zcYFoF%>_PCdO)?zMl8Q;6zcNmm0<9Ida2WN{e;~-uy8a2fH={TXulpsh`}L_4)P8B zAtv+&J9BhhI8IlZ&H0X7`Ni1SvRU55WY0J07J0e2)b~{P5%(LTQ_I<2 z52Bft#2hBCY*|_CGT3wm79V1VIOyds*HQ0Ypu+EjyA8McpN^4mUb^d=%esZ})<_Io zSRZyjsyp`@$UCfRk3-x8(KyV z&tnm{^Da4C=ky+en$cnFp_a9RLsbLM^NWY(MNy3waDr7H6)GRSYffy>%?Ccome<5Y z+xQ33#@!v!+c|1qn*TcXfS^H8^%ZVlxe*ov=o4O3%=@*v%Il8G9oD1eK~tb?aS9=wYmmijoe7Xj`g~*e@%esWbk$|q&3Vol`7jc=S!*7< zs>I|{nUL?2cl#k(08D-2GtBBI>^dba%BQ^_2MRFi#a}0Q__kvvA*Pv(FVRO`9u2z&C(aiCU&F5L_ zVCP(sKj;ND>$HALJhF$314*myd2hzntDrSCJv8>)TE)Moa_)VTm-?znq5g;wq5r8U z>WBK<)_(SdgeXYNZ=m5iaMC5HxJ{6P9}%pJT*r43N3#|axfJMubhH^99>q9f9ZM9Y=v$D?5B4Jj8} zuD3bY1}cs#r?#3O*->6mg{pmcIm)l5vDS;el8;wQ@3kzK)zKV}6cT=8aP9#oCoY_Z zlh!-otqtYac4*&^0&eXq_s%w=@a#aB0APG1_Qoz)UC!d)1i#$p!W$F#r2VV)z|Tu% znG7IOd8(#SvgL8HLkskiQ=evj7VC%0oHu>VTO76Y5 z)m2IL1ox7AcbSn0V1ALKt~DffdVk6NdWPFW=Ct3gB15I}^$`7cFC7;gE1 z{m0T+QraSoLM}dzWVmX*1RDJ5NvaW+U=H4)zku9HWZRLX=OA)HuHXq z%FW~0);>3px!-IRDkq`Us7Mf~VQ2FZm1h{+QcU$~((kW4)jFK0H9qiXd8YfHR0j?5 zw#iF$GtNhl9daPAJq#6Y+t6KCKS2D^Y#T;VlfUqr$3j}1Es~V23;Tv9OQOTV`FVM% zsTmy5Q}`98Yi9;)s#B5Fwx=C{a+5EN?`pYF7W+76MMLx{hW~{r%oJz1NV<;F@(IV< zB#sH}Pd<(hg8Uw~cFC>^+}`C32lGNEhbEd=*{qwP>*hqX%^H=$n~jn5CFK8zKG`9A zC<;vFgknU9|J2f^F~fHKKYp;c=G>3w{7GC#wvolFO0#3=;k*y7Gb}(;SdSO{W~0h) zsMJW2W4Wbbah+;^n_hYicqp%F!pcxJ^Vt|g)^7hUh^s9=bmwJG{8Ms~nItv_ zt9||0!K7YT5@@MH?18c76nE0-N~|H(HlzMB`m}lAS=FZ^@ddJ)?bX$wFdWvCYP6Z*=Q&cixX%nX7#M2W&`QuVihYfhiWH2W)!* zD;wmeR7W_BMAp-5t>MG(B}9gze{SEbIsy-u@>&mrVq+b)5C`8f7IR=p>&v;hE6x>I z2{sA)kLhsW)63J6_cy`!`94SYUe?wz&49p)WnQOi@akBH=BagktdK-u{VzAJH|zmA2?&Q= z)<2N0H&-@rgNjbt@Wk?ZaQ9y^+{wBBq;?si+$4=EAsWnhDp`8)j#;{X2grXRkmjthyK}zt=wR2*Q5~©jvgry;|>nYG@^+LV^AIqUa-%GkZjEaj- zbi(1^%Vu$gKWfvgnftUVaA#XN>wI|5&&KEznBj*VqNP}HL-Hc~;ufRn>`0SSHLnz| z7nk$vUoFm{i(m>lGb-4id!>uBi_rHln1>~`@LHyg&}$WH;_)il7A*T9?A{`N3$;hR zV0<0Qir_q(bC@6!=D^(TO$Ie71gyyPGr&ujJSabMm2iq;06VU4BnD>}%O-hvj;&nm 
z^QW*L4ya%7HgQ1iV9iG%5M;pOo)} z!!lA}Q9`1J$kkHEc2ds`-m(icQ5kd`#GlDrPIK%=RxuYP9524-Q2FZfmtuRiaE6Xt z)ZOB5v}tUENzdhXM&hqLa~-F+FN{rp_rpKvj;`@D1(BkQ0=w~c%Z*O%P9WFj8DN*^n>V#=WGjEVJ}M#m#-s zC(k>o*FSl_!}Gmryb_<^D1gnIC+XAh+e4po%RW|kTN!*OPM$&q+J6_|wOWYRLD+Ru zmGKRzoR^i-8pv@1%MlmsBg_s-E!>~@U|`Mxa}e*^T%9<+5S4445>fX#n{)+?0Lea% z(i+=LJRDl zY4kbxBYmVSM|H=8Hz{PxZ-Yz15Yz`m0&E}d3P$UcTt#D{J6%QEBvg^1xw1iL*2r@%`9Zq3G&EJf}kJB#U z<=RxWvS_IA18@G5()$z0qy@dLzT_wx_Dey68Rt*(>Dn=#+W~%>Vyv`wrNrCeJBSj6&^i3oxkToTD8 zJhs&HYcvQ)6YNDGo0r441e{=t(iwO4wPrbHJyQ_!Y`Ut@lu9ha8Z#%gd`I7XXnlS8n44X*7+?QAWgua4Stayt$iRh|1N12-d|}bw|CE52}QV! zz`#l65$fh2zx7OsNd~SN`?KJvlWrTIrh`OuV^vu?@hq@XfQ@4>OK@JzHJpq#Gu@P` zaTx`{*|U>P5oZVFvGiNUvRgIo=j%voD@b#{|0iXWJyaE+&&9pi_@2l36}!>-?@Jep zO%6Sgmac~*iZ$~o3GJ#zPdT<5MMjoACr66_BijWC*Iki1J=Sw2OKfBW;?4gF+%VLnUy5s%-(1zDn*I-I?sNeS7LQSsr2BBzI+1(Y z*rKnEfi8Dnp-q`rD7uDf?jG+l7SOO)Sf`+_{r7P%zA1_GF|F`A%}3Z1x^3xnA z_PK_F|AFx*F&I_J0r9R&3jglg>LWXDN- zNk8{(FcP~R^+jP3tquBl@{v@@mf&w~WpP=-LfI>s;o0!WbJ*d4Qf57+M znNUNu^8tH5rbTTyzp-jxI5HvZoWq&LZ{HZ=r{tBg&xxYR1%;LkBr-APSADk0{zUuk zB}+UWPGa|A9w&Sm`2KO>h_v*DG94KO-mS)WwU^gFS-z^ko(NM=^7$3n;k>5EIda%5 zd|`Q4^(6-_-<0-umCeuoZ*}GvWMKMOURU1wh@sVK{jR6Mo8ea%aTd@kAAkgI**9h* zkKbLwuj(slU~+%of#(#va0y_`4u_f%CwTb|;Ws8Gtb&InaG?ZX^t|SPY@B^+Z`kvK zM4YdCGX>;zIu4l;?+n*lM^UUOIV#?t)U8t9qK2-K#nkzPe3t}E;mTLB~9Lg<^o zIY8%g$%4p&ncie|^Pmhivn#&l&Z&ZDn}L(Z?l{}1zu+|G{pHb=WU>8w!OJ(~@@HCV z3J8ziUjs5$aSZ!GjJ>1+)sF_dzVN_BkJ^kiBm_Ra^D=9dv6denf7GMoXS)xQ*fA;6 zE%k68|4ic`1(&dUIgXE17N|QMrJn$rsV#Uc&LFve5dTV~5RjO9aCs+G;iGH0S*v4- zpCSz>R)7DRK!D?1vlYSq8G?LhD(r>>K0p>Z|60g_Vxq)olfF91vaKlyoWPQ}%kwxz zHgoKqz715oXKjwPE{bs+?->PwTgAMR_0{X9ur=5ph4#w=nq)KgTISl785ozu;w`$5 z7tX>*Ie=wjK@@hunJ~n~#QynxTWE>HJAN~WG$Dk`nj1p`ZJocV+Zc~D67kBwB{k3W zarm{LpySoWV7*o)a0P6Br_VL^3bTEPFJ1ENcXUy-OU1t9Up%XZ-h-UUmQo;9Z$JvS zbzpmScFim-hC}s0?1E$oRnY+-s&l@>8%gVPVj;$}K`_1Y4zwKr^_$;0#c~~9=>tCp zdkx%dNw%_QElk8<{%t(<+#4KQ2&|IHGm%C)5V4=JN&?IK$vW*NB8vf(^WYMxDcD+W z*kZEcQ@@BOfmh*TWsVl*WtfSD;LZa_oFVRu*_Bt%=eovXL)POWvSTFE591ZF_?~)= 
z_gyi3lAQQl?onD&A4y{7RS%fT8RH$k)6g2Fex1Y^FLYZedqanDfx%9FVcevKbbO~Z zE+XS1&Liy$-DH-YQEd+ASS=`TpEzqYf&r}-^hzdc1Xw^wb_l>M|MyYB06W+9d|4z> zaIwY`KO!8Wj7>8uw*2-$uQ%6?G!?>FpaJYjbQeF0au}40G)=s=w%|U=I>CQQf*$*Ze zA{(|Fi@aOTp`@Q}-U2*{0)5w2fKLkQf0oy$+x0Q|*nIq4iU>cYxrpF+O)XC3x4<}R ztEcY@L=JgTk&-vXlL}pA941S=NRNtFg|!cqCU;I_+5qR_O_8!B$KXGyPe5hY5dZ;q zAby3QfREYANV~f&ad)Cku4kqCSHQa7J)L{Ym~c3;176{cK!-Loo!+|y+&XWF}BA=zHci;MYZueXMU?% zlMM#I6R1B9hbp)<;^J90mN8O8HV1x`+c?cV=obND+qIXLISXcN+wjc6`{WfWw2dG- zj2Tfkyd8;`p2M@V;x@DUL$vp^Cq{rGpa;Uzi-se@((X58xJQ$)zk8MMo1d!|>mck; zwpQ$QBJi8YTNYE>JYuyqMJ}ygr~D-LTiC-+jwvXDrbO-`qlutsGs;-b^0_|r>@Ld9 zK{JGZ$yZeWzp)PU)5nl*#{_WERm@M2HdbhiPI9Py5UCO9!Q!AI0)}0QJv_820b+?y<3MYM$be^(y0Rj>6>Kbe%%~;c^E; zefhX>P7+7wi-i|1KDQ|0GZ*AU2L(iapX80X96PDC1>IcqCaQSe^3?Dz z27=9eXaHtl#mDwk&3l`-&gvNxRt=D&Ml@z(+yNS1T73KgY~w5>_hVHeV?%C=kM@o7 zD1e+P@(A~>izJu~IKb|~PT@;LDwuu;buCVRUWjy8NLpi1;wQ#xaHPVPS@WI(!LFV1 zKR526f82*xtY(MhO5{MysD0V2i_X*CNhN)rk*VeXjxhk2iL*|E*;0XjUDFMYLSpsM zD{qyY#9qC$r^NCe+2s^eX>;gDaAZeCB@BJ?sB(;A-NhaW3sDV|-ofEJxBY_UYs|dPPOjdJM%b}R_?QG{kh>mY~H?m ze|OZ9Of=Q7+c#T>l*b&wbH=+GFF)L!5b;@1&J~G7HxWp83%GEIfeXo<-RzsAq1~4~ z#mVPBiv`A(T9fACYzNUoJ^g&&`l_yg=?f4bA}krtEoXMbm55AHld%niCRnsCj=wA^D9r4% z)tBUTm8ht6kyRas!v3F)FJT-{#fDogMXG6U-?HCb{hmbPh9pp z)HxP%$Ss_e1~P*S9dXXy(G`bt`i;AaVMtKPuBQWU8#0p1q5^xH`*Dor^=)E8N<(V0 zac-TmW_e17~?c6`hQX?{uCExN}LD%vUNLjVz@$D=w3?j z|6RQ^JYP0Eee3MGli15bOUa&(P2`T_ZhkLm8ps1vzkpoQT>Ba7lH>L}#19Ph9RLE{ z>U7DMpC3|p+UYbgO!X|$mrd1xezZ3UYmdD5K4GpaIqQ?{lz-fzE;VWfm6RHHHkyuh zS+;AGd)d9R9~|XuY>7FxEuS=1lzLvos1xl*guVb=!^h0lc)dFtdU%MQdtKV{l5?+s zNtC)JWDs{X*1hp7xwL8blaYVBByRtEnijz@_cbQ))E_buA8Q|0J@Nj^h1+G%E)HJ^ zII+8KqHg@N@6ykn+t0kid`I>Nmt_@J)>3Ej6VOxPWZRg3)Ye_)P|d5+SrZl&e@5tVg!rz_L%RxFhs77cVdQ2M%jk_G zpnq^1_G>+xN{o-0e3T@*Z(J z9mQZMG^wz+qa1m$eg!jU!dkTmOScYgAowaImOQFIlIn|PfejB_uTMWv-Nv?zV;JHH zfsmLWD)_E8zA1jsI^{H@QmwH%jx%1AoFY#)jOTr-@Yeg~bB0U^u|0J!mNQ%`_fxI^ z!baly;(BaE$sKtql{FH92B8zs@-P`|dBWeKVcuQ9mu<@Ut{=Fj_Uf8Mg7A0<;F-YV zqiFtT8{z*F15ENzILqRJI99eKzEY!QpF3-MuY>v 
zD!mCOwU|HC5|SdIBsCamcKQCY%A@I(;r}9}^qsoHS3%7WUen1hrHAPSC?q|!e_l#) zS>t`Q+|}vplwurX>~=k7h&Z!PZnn0=LU||<6B?%{xXf5inLJ7s%dp2=V<>6n7s_DO zE^{s?K#LV8smz{AZ!=N#eHI{emt?@Fo~UmXct(&OVQY-5IBSm&B5TJ`>Vx0n)M%f@ znc%(Zg-M9zo88YL5;trl9KsY}`hj*N<12PFq?Or*m1#N=Qfx6)o6M0;Gd!s)zCqu| z-i)m?)Q;#j{&8;T%E^C>4Oas?%|Gsv4YgLgqmMHS{{nNv$zik9Ezri?$wS#%iDFG5 zMgF(pq^4Xd{qE#Gi@uX9e(;6%%am^sVIr*-+$f>+=imwL0X*MueCL>pCylM4em8p-77HP zZfz%}xuyke8{&+hl&5e{*{T12LdBio_}V?|x_WvsPlVXw*RlESHcn^FMS|-61`%ay z_sd-;$L1=n`-3in9xoK}Ex4=ZX!wA}gHrE&HJl(Jmz>!v=uuAY(d+gs=OsLuM&Zlo zoC~bO+T_#0OLG@ZT+zXpA=h2=boR&v!Wvd!KZl-FmMNC>nIofaEr;+6f+y{d_PT*G z1M;q57bjEB7u2`k%Y@IZE_cRWJ3U-EHoop8SwnKkU}NgWu?Ii-c3uBQm>6JBJf68k zOXyxx;8OS|7FqO|KR4OW9KXq~WmhrF>>DI&;*YCL7*o8q=%_x}QT!(-K-7~Cfp9nm zy!hfLwITdVX2hx(xQ5<WK6qH}0dc#{buK#8>ur6i{YuUcqd(Az!MshdSkVTw{ zI%lk^?aw%6Okm_A9vO%}BMx17;VlE9^)uYi=;{cx!U;pH$+`@5=nK#k=i0ua>3(>Xm@B<_1psxiu6CYv!hZV=slG9`Q^Ry9(0ecoHpj ztrgX<_|>j32tCitl<)NtpUP)iX4#?MFp4Q$);gC&FyAZr>#^yn;splcY;lhx;D?up zxoIUYFA|0WJc($ekZbu;l8^aYKmt31&r{fVtXxe=EpfMi;=0%8RY>qdnNvU=DQ9~} z9$sy35v-%*^w*X>Up2w7at=R2d%2akWz4lN*5h_#PCH(4BCathMSSfa zXkak?&A82#lco1|-8?GgNjy!QqJ{{LfOUx>rr(HIm(eLFPlu-wRU!`KzvOyL>Uszp z;tWaRi-jxazt6XTXyyGkl@KFus_mQ}Am)tH&131!&XK7mk;!j<*2>PbZ$E*WOWTNh z!D3OtTT;X1Pn!7T+jNb)8DrGy$8>Q0Y9F&zRV%yh!bim>Uxzq=Om9bnUUofLN zJ)eZl*IN=F`pv|-?j5z<;PLT;v8;oVZv%nj{-@F)`a9r zHAloxOw=*~K_F)Lx5GIcOS7Bc%cx2bb3UgmMfXh>&#c$O<~AY5#R_5Z6t#fiS~cCp zsL99yZkp7Nd|@nBSPDW#JZpUJg#A`;d7&yB!^UbmPMMpz?nn|oJY`Tz01x?VP^D6^ z%Sdq#5S1VnF&nz?=xpf;9d*BYHrDOmkUeSGW#cm2#&Ex)=7ri`S5D934`(r z>oOBk`1v#T*zern7yvwW!S64At)O1Jp70m0nEZqDBUXe>@EILD%lFgy6?y@N_z?vo zSp=-k4x-E}vit^WIWBk6=EQP^v6D^>_64>zb4BC;xuTNKeu4sxg(q`&!6#|+;)&rT zLJVzbDd^DWXV)w*hLbAL(SLvt>tu85Sc61u1s!<_X=5>sj5FmCP7Uf7##NG3x%d*X zw0m1-x)!A;H1hDdG7h}60^M)@j9zLz?&%K+8|yOuSMTT=6r8J;)%Qy;<9RqKgus|J zlPFj*V>l9eapsChg5t5hEaEJ)%HK?M|4n)s1zVDk;2E@xV%4-;7wD-@wHvgD!@|?* zjV+-9VHiF}6o{8e5VmN^xk|W@M=aj?ezB^&$Hg?lLOH}mEB*=HXuBPQU6`hIH8c%J 
zt|qNrxgV^l)^CcI+AkF43LRZr?gw#k1=3;$*)Wx@e=f6)D7Vo5W8J)4b^e4<6%u8gUV>?IQ@3{Gyd%{=Rj&UrPT4%s1JS8qR+5D=k zqaud6W=SkwXXM93@IzE>N?f`@Vifih6Hlr~h6C1q4{waDz-)>431R!oZn&k&Qbr$HftX z9tI(8>?@ID@MC4iRst#l~u^|(rT!0%Oqnu(T7cL)9y)|x9ge_Tyn6_L<2sq zONZ_UIwuu+bFw^;KU13x_}$n?9Nmsf3L9u)S9-u1C)k&I9kgbE7Bzzd6hWBl3Gz`^ z3?VJd0LFk*S6qV5acCNd*xhh&{CvIFiVM@S;hTHDXOlc)XSW9Z@8|x9zbfxkH}yUA zn;x(+>&+W2}gA68uf_bd-+ zyV8X&5Zs&3Ny69c^YVMj>h9+M4(ks?z8ix*Fwh0OH9<&QW87(%>rX|7wav}-aVvMC zjhCq=^YQ`J_5UAHUmliJ`o3SEX2xlgDKoWHW-@ci(#p&orfG4^k~WvrBr`QNH8XRC zGmTT0sZ2F$xj>tdE3Rp#kSm!fCMqf_3MwL^pn%A7=J)pf=l4(7Raaba&Uw%KJn#M7 zOK;l1-?|-)Z#gvskd6r3>F6oF%a9D+U9`7NqL>v}cuiE;x<6>UQr>0B;g0>{C$gR| zwVq|n!0W}XcWr~$h^z>u!OyzB>8fxc;OY3D2Z)=W-a2aWU95C{DtaeRfs-sBF^)(s zJATMx*_lymPp*?U=^G|4b8DouhghRq7ZTAwDX#3^Rhx$+#bgh|1M!AQ^q;j;1Rn+1 z>v6pUoktq$^`ddaS)OL6PXKJHQ7B`aoDzbGkn_NG_-pWnlMqj6C)Ge3NS(~kq-f0H zrUtiw=9R5sUBUNSHSQA!oc>!icD~Bz^>9s%M=T1i{obRATVBOsH<=3El*iCW2t(d_$QH?8#_5k(`YD_$#sXVm(BD%N14kskk7y`m@ zLafGoEW&YH((x^Je>E$&r=Vw)nH1&f1?Fr5ul9O$e<-pl_AYwD*uE8nt%zC&Hi21eO z(s>5sXD2f%9<8?gi-QPSOyX$G;A1s^f!+(tucZ`SR7AIUZ@7`a{kPtd{{xZbv@2j! 
zFHH-oGY`UV9k0-i^V{0SlRU&I&bJ-T4uncRlywa~?R!OkOkUs*dT)Vdfb##g)P+kEv|QlUqa z;!;Z|kc0R#bwE7kY%RNN`|;uKVI|V?>DBmwKzNCZ*mC(AK#)B2%;WuZp{W5q-NL05 z0PY1MhxOXMFO%BGEP9^jaQVo^E!{7!IRSIr%eUGRBM5+{F(-B(ZOfzm6qCE4do&V$ ze2aY>W-#9)F>poqZmN#rS&6{&pO#P@0sqR`aGSuXFMuQwP`AT*X9P=*iFNVE@+r9gyVdO?yhEq>$<&l~UsFR18zUbr_p`5#&QWP^8m&<_ zX$tLkYjg;$U)u_fhu^Dte93;MG>}{<#BQP*D8jhpyET?;?faA6 zB+*W^K!`vjv1AAy4WS65SB+sx;+bELz;gL@ zueT;(!-vo;QUkZrux0(pv>9$A^`Q_5EP!6N&r|a|z>(b1+va3{lp+>DkK~BI$X^uo zesHqk)}}T%MCI8tk@t)l`>R=tk5_Z;F zh1acC9ghQJqfZ>f{Rx2_`e|5GHTUPA4t(TWJ9;6dAPEU3Yysspi`QW@#nC6>aOT@< z9Kt1=42KL-0p5yUc-kDE?@o++07i>9&@C)l?I~`tPuxw1(xal-V9{pamK`&LN_p;7 zyqEWpvCeAHBcAo+RNNj0wlvTsFpg}#v!>~0n>TB~f?fBH(J3*s*?Ze$z2<@E;MxC+ z=nMOA)$InbPa{SUhEUU4N27Rh8=?jCop5;=?8b+yW<3B@At0TnzlrJsP=lWQ5l~0{ z&hHv<3`u-05HaM3(@;(HMUx($$@6I{^mXo+)E}LJrkp&{o)|0isWrxAt0+go5EHGz z_fJfswkdE;V72j#CK~pXRmXklZ(whcedTPWmKy0+w;Wz}N@4NoKJgBkRmu(hWeAR@0% z87#=?Z`q9*TA~TRB9Ex*Gv5P9TmOUE{43C0TM0O9i6$)PL;ccK@N3bZL=0ZoV{z)^-Zk5j?nV6@ z-kL&akty+5mJPfq-Q@fdzJJp_|HlU&jU;DiDYTvTjMlt;d{hc<`2P|D*aP)%MVC|e zqb(4aB_ii;LUdcFmgR{1qRrbj>O{E#V+TGJ_%T@`&W7- zdC;ici{=Jkr8SdL7sMOl%y-QMbbDRX5pg5D`m{4W7| zL=T-VY=4?(%JX~NdhYbt@`rf<&{$=VQ{wUV1-ri%rvh2e?C!-qUSZq*);YQ_uhYgC z5DKMyj&;|p{oa9wPWZ!ner=S1tqz-aWvo6OW3bJdv*7HLNLgZ*#+ju%ZZGljjrZ;* z?MwR@jdpdE(2wS+f{i}$oL10V{VV!M^)EE{2xUHO9scW*VKz7#tZC}1L+o$*%1>FC zAvyYjGK(H9ix^+K(-0!Pg)a(MSg%8KHcq{cjz7vs0h5viCi3WtC(`uqk@#{LVj`I< zo-=WV#|@l`y7&cJz*0ky-X^yKKt!F>_TXCXIp@FwSEz*F7oBrOsB8F+fRZgQRo$ln zio!}T%c%H=MaxbpUmdN$yDyy0=a{2>j7}7w!|Yds1)|}gH0{wc#yI}p`0^E~QE3d= z*0Q3T-jk#~kyUU2nMtFt7T+%KCyk82D!`^|kN>EyfOUtqh2Eb0&_C*f^`mE=dtKPK zELa=0cjvcjdjwhuLQ;QOYRi$_^G~``Eq58W^T>S(d|7w2upHK_O68yz*<>eNe8Jub zJ?(zGcatJ=un_t@eefelUJ@5;AU`s(-_8M#?yYxyxVALj49FR+grbWw&bqH43(|pI zrNLOdLk#sD!3R z<926DPg)m-mYPEKf+0@W#aAEJh$_|Ydj{?6igc@4>opM8YKEa5_rZco)*fWCr2 zP|wzBP@)sPhr%yVDE)q^9pPI?nWH-g@)`Y=#*Qt0axX_^wx)mo2f&OR&^u+RxqF4+ zM><6Z(-)#GQWHsM=QWi7O>E%XImYk>85o|R?Y2kGbbd+8p{#=j_aiPyMC@-2MTQ@k 
z%VGzpi5neH?NQl(zOaW}lz`3wZ(*qgPHw(FyRStRil)8Ngw4cL3k#0M8uolf@R@8i zPfGduL)BIrtD`o?nA%g|YjG#g$k>L5FBnWD14KI@OK;RqXX?TmymS)5LZ}0!@)o6` zL(-l3b|7-dNu57H4W4Yc_?X##R@uEZOHcSOdU0my{Dy<+gP#}y8UzJPAEW&SW@-7* zM&Rg%d_by3{GH^WayVY(B{Vr({p5ngy&h@>(rcvIws(c4I%zYPuOhR*Tjh`!j&I?J z!BO-+x+W!k?=&k2iGVkUEF(iC=nU&OApW?F)R*r7223LM9f2l9f#4$MUa5@Zj2YvM zx!Q4uL_w(t-$jXgJjQ}|8Sm^v&+IM!jVMVc-<=aJ zP$0e#yyV2prLscFcCH@5WJ&EDugMOM0y}(tcq8i#WDE@bc&P1?HC$^_@~st$X%6SG zoD+f;U?r3G5J@atR5%*@WPuWc*a&imwSDe@0rmg+e-_Jpzq!2Pi$PGU(`WsJd9Mb`vOg*b9Mn@oP)_2*oi!U~{N3moQq5cMdq7FFaisHvBJ(m2C&qr^RKjSdUg%!jbJ0G@HsVRRpyu+L%Yt zEd=M@ze}FLz98>*mZXcaQxtQ93gZ)F;&F0^G?KKq3{63QZa}4)FJDJ(&Dfg0$2Qcj z`2oO@+piqxFXBd7;~GP!;sHve4H+X8nmEC++cdaJT?eNytzR6!Wms@*22bnpU<|)V zMYA~I?O>y*ny|x|Q3!!1fJ;NwWSFWhJVW-x2w;3e{|R4eNanW3_sStwVa4==fTbyb zWignb8p;wqu4iNUM2NCv^y_0xO$TWufJIx8!2vbC8T$UySPB>hBHx7Bxn_4&;udOY zGR?K4nsm*JNIKa3{POKRbA>q@1~x|{j5Mg%+B`DfPeVR_s64(Go;Id+%IK7W^Q=Rk zVq8OB0EZUJI67Ogtv%lb2P8<$PrSDC83)l+%^?)H)n~>vPB+#ONd%r!GP`xmf!fHt zB{DnnZ>RRN+%UiQt0W7|H!hQ&2g9bo<4u4GCm@iYenub^!(U)y>uDfB9SjBNS8lIkS(tJejW7+Gya9- zhn9%m)I3+|+s$KszBNv2DZro25l4iatns!zR@@_a@WHpfG-{iG9iY^^m-NSkvTuLg z7yRza=4XIQUGs+osB$?Gs)S9Q!p=NAoou#)PkRsO2*u>7uK3caJ|&6r7>&~4P^9r| zS;|Yd=fgTFBPtcKMeU4@$lUvwzkDD7K$K{nR2@fYoFE!xIHwdaRO)sbV31?wSmoVj zj+)R_$9z~6DiSdk;P8tfElBVv$Mt1-`ZSKLdVSq1u^u zhQ0~B(rf68YN)vp*$f-1&abe70X+IGpiSmEc=9aSI3Do(WMeE8LP7Lc8Jps)RrroV zZ(N=r_=HcOBZq6Nob@sd_qQSDA;#H-9$3F90auucB6Z4?x2MZoEWb&FOu$zw8yypR z^If7ld*;Y@$!DX#^j6j6U}X)!`ldY|#u=XL+8dV@QigW3hh$5>HI1xXk}!RIQY!jAr*% zyH#yCyV4w~0PTmJOoX+}xsd5{Zu0f*6MGztT5moD3UUNlL279X8|vIpnN$v%ND%vq zN-FDY_qf?H`58)bAhkxraq8moD%RZe;~hctNO1UScy_kXjf5T#?=(F2{+A1~QVSt? 
zc0Otv1Es#rD7f`#^r&Uz5<3Gt#gj`$C?TLeba1m6Ez@xWBJD{1EghmSw`~+pwl@mA3iHaiBM@fqn?9&$>goIN~+y9nKmm zD~eBtYRB-=n{M5q%o4L(wI%)3JULu~{0z*Fq0(s^2Eq$`pc;$+#OVF0F+zMB^e-VVi5{ z?=8YD4(&@k!=hEn!qQLnX7vo0hM1tnLIdF8#>sE1u=()N;mZoswUMBTuYYFD3@j8UGpv*ZRPM(eiA7CAtd3d)gnw3BOSD>PKlwa}46nwm zKVE%UUb~&jf@1T4V*iJJ(Mov_pU;$fWnfXg&+>7q{& zZ)l@^mwG@r@AQD@c|@BpF7K+CUV@N=-qswF7vns2fzDW%T08KK_I$t#dtOm&P7kQu z;M#R_ImPRXW0d*&)YgeCv)Pf(*=1Lqq=T8&r%Ua3$sC_JD|E&++udraa(0)^`_roW zsG}%U-+>;vldV8ThmNtVi;>#wBTmcv4cS){@Ff0@dWWtvqmYt7UQs)+zM!jv@0Upz zwAt*I9~V95d<)vq*;=5$D?Fv7_4P?wDaFZfpHhYdY+FDr`t}F>l3EN#uYiIJ%r|s_ zE0*&Ktn*=smSacd2v0AS?oJQN0Rj%F{vD7D%(?t4Y*QXvEwY8>oW?2c^lXD z?lbeS!o3p7dC2RAe?gV2yZ>O+wI4C$F&zi5%-At_FmjS-y+m(f7!P4*whrL0S9P@8 zOZkq#WSTpBA*n`dY-)_L1GUYBZK4Lbt{Z5x z;Bbs~8$|;cDz{L5BJsrIc`J5bg6eiR&Wf(v3y{t&wQ5(ed!yZmR1IyNlJH#|*DI-0 zWp3N7NvIv5jnSsf0Xidb?hhhv=gpoiNWW|8fAX(>vwqV=s0o$w*ndg_yJ${CDH2m( z$=@*65b$IvcxSW|Bo|)vgOtmw3|j-WbAXhW5zW*UJFB-KT5D-4oru1mi}D3?FiiodRc|C`%-1dTAM8%HO(l^b=>9t zcXMr^tmOaP^=pUrdYF5D-KDf@`D~l0lhJmr^G2PgUrmw+5_rLu)n#ST66+Unh)E8t zy;Jky5GS2xB!br{%zbRAS{f3S^J35N2pH~!A$4k?m2C~j_9IY6w&KOf8^gAfyFbB~ zu={pDC8qZzUXE2a9<&u)`fpXb+Px@_-I_zKf}*10qYC{-M|^!(Wf4u&XK+c27Ql;2%8?fxF=u9*!`MlY4D)B;%Qc9 zO)J-Ts_OPTs)!}?&@ySB9P-mM>kOCeEyk2^Gz(ie0~KBrRZi;TP(od8CAtKy@ryL7>8!g_v2eu(d(TeMJB3oL-on*6JpjFAD8 zMFiCR>POG8VAhiE-mH)1IjB4U(A*Jl)bWL{Hf64P!jJ@``J~d|-N(a>JQ$sjVEc)5 z7tCYYarldxX0gn|Sj8S?&EG0!hrEndJ?WrFI2fxn_5s7t*3oL4Y#oCiGmcCr3q=Cv zZ1kYC?01*%P>ENU*UZ{}r&)`n6u7e}B#&N^#g@%k&(cU&ep_G}2Fxa2INvGgG&(`6 zoB~)X&QFBj!w$jI@iu7(y!`aK4kiqe{Yuvd#rthlLRC;Dm$o=CNKs|=Oj$df2wP_C z*b4%7k>s9R(-qONKD*)uV4G|5p7>d_UqAJ!dX`B1Z$ix(jMg z&5Q7AXtSbL1g%Huj**9R_CE zl9~8P^G`Fb(aA+&v6ubgD6Ka9!hx8-UYB=kT_WcXacPg$(aK9=%e#FN1C)<@l&5aN z{nSs34d=XkjeiL-bi<_)22jN*o`65x;x7R)P++zbZc3i>-8epvK2YeopZ-;urK8~s zy$XtB0VmPdTuK|jc{?{F%WKt}%sV(?|$;ZW%Lei!1dU!DRf*)8xT<&6ZU1XL^}Ey z@@T{>v+tG|WV6yBzthuYtJr-yLOL~0AFhE9CU#y?58epw%yGXTH2zL*7YT{gEEs?qWDF;?j6D zbMwKnwMJ{Z_apQTgdIYrBkL1X8{3+QNv2hi2%NK$V}3dkv$R_r#fc>A$UNa@PN<0; 
z^W(~&s|+7IuX-Z%L#&;QE8ER=d~#2`uOTQtXX54;^j@b?i(bDJi`+3c#`wVG)Moo6G9H-B`B2Bu*uooy?< z8!%0_`k?q&F4~sa9Aly@m%ZuJU5H`$tNcMT;T`hwt2KApf8;+G`W60=RKWt9lpD1l z9x3@jE2Ywxa<9_e^vUEiKb<|D2_617;;y5%Eq+ZLd=|~Eu3 zecMnqR(_sk*r!+TF}g_|wOHvZrN&-nt3@aU6vB)o=Zz&vK?e0~d~&ToL}&3EPY6}N zyMY_I7a~m!)4V2z-HXo^U^lt!LH~7|6TG-o8?ZuXz21Vz94O8jKj1f3LE>&a@8-Jt z=d*M^9h@YQ-ixk@w1fQa>UnLSjGTC#o{45PW$=Hi%qr3U)a5=X4tiFw8_Wtaw3r%UaqW{H!8ze) zvoe>RP=-S=Q%x8G{p}7jwGLG%*Jw%)bu@Se>`7Xu`1RI;VbTSf3boJrVH23wY!Mr@ z|B&QbdX)w$^N($fL+hVlGmH&-Vit@Pgd^gVyHa+3WGi)7pH!oDr1y{v9ocWa@g#OG z-{Dk>;k_&FM{Ljjt!^Vj8+?jQ5Y>gNQ@E>Kms z8sS_;O_bK%r0=zp3)n|P$eJvs8?!V#{CGiW=DkNBnOe_hwL00xH7g6ihPLBrMxAip z>Tx2T<|kW@7481T$7$CVEyLBEM}7D@I7Tu?DtJ)#-P5`KRszO(fsFs1oFd*s#cg{w zjW$Sw4{HG|P!pybDKGhUG|-)bWwn~$Nz^#Ix&4NwvXzVO^ea(bd9Jgg!=4eKVqil#b7oo}T|Va5fs?V=D1Y zd?sHxRpOm=bHz6pt$~CJ?ffv2E!JK+ZN({0rHE9>n#mz}$GHO6;mMX5hSz(JmWL$m)dZ4g^CE*_ofY-_(rNc#8P|o za6>R1rMH>Qr#zt_5olJyH0+qTW`9l5iZtQ{=;^iRQbS3 zp>uFxn9n=T0PRrdlZ&7J$T`rx{{Wnjs3a6baktx1T#WuuUPb__F=dL=Ykhjl$*TA( z5h->H-3gGaUUj8TW7#zPcWvpRR~-by3`hw#+DR9HYQ8{U^`tEkF-w>J;FIz3d~-kW z3EbN9;gT$jJd>Jmr|KP(KHqH$dSk;V4w_8c{o;%1z^kP#@qV%Rd>jN=BLj`=-QTSo z(~g@*7<4ET@-}F{PV7I8I9c`3z0!ol(Y%SDcZiyIa;h6oUB7Neoy%N0?a0+ z6Sf@a3kg)b@3J3bBOX+FOB-1cY^Ma|%g*rC;;dZV*S{MM%Xp+EsS_eWL1;7H*HA7o zozA5zomk|R1>vLK8c>!=Y~DCxu5(;-nBMB$ws1em{cQ*&F&C5W?mIsABL2TsK+VTd z=dqTeRL)S-aiA!~4%DnoveZ%U>5145J2g1vcHI;Q+rccM;E{|=^BzZNFOhzV-hdBA zZH+T5qgrk$fB!Dz@3`x`%z!nre>ga4^)3i)eJ#v3h~Df5cgFrjYwu@U0_tvLDW3OD z44lB6rDz12VBLq`V#a{O3RZ@W!CS&T==CTg#g(PAiaXqV^J6L}igMrEdb2YPd=ikr zvngaZ^OaAUH-M05xkY`Vv60!ZE3#qxcty-xAD{bWe|wKr=;b%{#drT3ud1V;vso1- zA*9M^hKhDLj7<}XyyaXWI-(C$m-fbH0s*gh@ zVGcc}S(xif&>o=Q@hWBUM_joWooPQ7@USShejx(fcUg&ro)Ej>I$NwM1|IJ2M2>qi zTj(UG=|*uK9tt}JS0n9$CQDj3N7HS2KWj(zL?;h4KAb>*gpAceeWy@Hm0s63e7qE9 zpQ7BAfAuE*3NGr-pZl%)3|TYTcfMJh+b_4A4&`Eo*ERp)c*Gz6XKYn%>j~`y;kG|V z^vLG#D>S@bYF2FzRPLDTq;>kCOBE9g)CX9=AJLe^%rjmkpz{YBBg`bpyPG)s#+86? 
zj=Kmo=hVvmwtgOVFiY*nx|)G;Ow*WF)UKen(+$wYi5V_>xcog(&yn>~o4LTzhD$X6 zzY7I2I_@_+Y8jc@hWry-sr4s}2ocYE$~|B+NqAd@l_KR_4P&rr=Qn$Uvd6#Kj@+cL zy0r@Nx@@l_#{te)bldaPgfjHH_Y;>AjLx5^T7r0s?1vOOS`#Wjb#99PXx@lA&3_2aaeJ`r3xi>r( zLgRD906&`59FLCO*0DIfm_nu**$0s5rFehucYEILAotf>QyU{SCRF!gk8BWwQtN2a zPBeo1B=?dvg~9UCU1RJsDwkTv^rOVYM4X-VE3(4mNb2Ni<7XRe!6n)9jO#?gOM-Vq zpRA`6HW|(TF2I=MJz53u&kI^pD*q2RnE@7NM*yf5zDewUf?EqGFOz~3Sw%9X<4!uub#=NEDtrVV* z6pPK_KU^%CPZ?U+$5>R?wcV&Ew;8#yefuJ>G~{*@Cr7|XoiGu$yn-8Jj*5P3ZkjdD@xfi$d|yl5zbWgJ6?QBK}Qo9Wr=cbgAHJb3{{R8)I9t9 z@KuOTXAwG$(2pp;*Mhb?qIpS_V?D_-(r6m%m3Vi%dl+XSakzG%v9=#y3TnNl=HHnn zuIaCfP~TaN-o@6ezs9(IYB>uT^2N|NUY)qX7|=yL%m_8AbPtDo%!Plu;^n@Z&^x0u zs4E{o3WRH246VvqyNJ?r#O1VsM8;lO4dqY z=H=)d_rh_r`)$xw$8+shu4_iz%(Uk-Kd?{3OG?C=raF>n*tUK!5Ibn@$UFvYm`EiJ zhn1>MDGmQ*qOa;`%ww%g0;SdwN%4D6d3y_}vDfRPj;jw#`Ro-bOfy&f@1ie$Ij#Jo zAP=s6H`FA7tK@{X=-yv}VS> zW@ol%$Oj8x-obYIx|lg~jA4I6uLA8Hc>PD{%hL zW41xVG~SZ&s8jt_PZ@HJ!miaY_HIF1p^r-M7iO-(eK)VX-z)2z{c-)f;NT zjts}_VXRN_ln>s0qdOz>_7l#22JD0ePQ4GZ2yF-|#7lUc!kvE&ygbz*4T;K9L)QpD;i}q(^WG`L->clq9!B0qH3f5wy3yK$PD13F z!;g+k4!3OhIToB0q_Pqm!WDJ3J+cMeBcseA5lNQg80~8sXm`hr{w3w65dKQBayoS3 z>e%)qi%k{}f25REMV{v#$TPXy7QE%fgJO~9Lo-bc4&{If?T=q##9aDeaTdMDC5fVx zs}p#A0R`u(Q|6L8P_)Sa_6t!{TVhaQSn$SwTVNkNI1wQ*a7`Ti;@2f~3;}bV!J?5V zHTcFqznBNi)bGR3K5Q(#N?|eiOXl|v+QNfAu>WIAN*H@dHQ*rLIP0^e00RO*c{) zuCp@qxRmAZXJr6H>Ec_u65BktUA9RFeJ;MY9nvgZde)M{*e{XSRAl1rh5~9o4a~H% zpS2IgpZ9q-AGC-fXB@m?$6&4u=<**(*=r}NdbvKrjzz0Tz@&M~*Pn!le{*S`cmt^E zz@gknA;s#;5>q+ZYSRta=xgv{k5w5_9lUo@Al2G$82^z}W%%x&CU9&!1-?bLP&E5& zK?yQ6TcfeBweG$@;WSZrrOIz-!V(lXvH$C5=0K;=wxRP!iEmlg>sPq^8pno~HcU}_ zrr{5speL*#uY}!I!%O0_vH9(P{Bp31o15>}on(vpGBXa!XfBmM;0%HuUbRB+`U~AN8$GVgS=L#j@!8}gz zd@jIm?2BYBdj}Op(be^W1hQYe08T_=1X>gPxmRL`Q}uw8gl$2uKeCagZrFYf@+GRH z1q^Q`t%bAEQL7*xeOX+Vi~vVs{ut}I5Hwx=(l&f#D|AYG+ehvmW~Ty+qVLnpc_7w- zz9OQ;y=-7uIhzz=-dYrgmWSFR#-TosE%5n6l)sU>mg=Xspp>x#R2}h;Vrn62D&rXg 
zpE+|Fw9yRu(+uZktYH^IV=s8>c32B+@qRQ^L47jbAK2A$Mz!48S}{XMjah-A7$YNWoBBQ*{a%Y@KN47}avSwE16Q$!Uu^Oi@_#b8*J3d11{D!u&`L~S8C{+dy~0}FR5Y57>b-d7esI}H;w?IOyG*Rd$E?Jp zztoY}udr^JZ~aFzG|bXFT6W_%GfHG%0IA0|Qow6tUJwsbwLVebR-hLTdgb|9Zhig% z2vhPb^tL?y`tC_$$85KOXKV0d<(C_u?-e?Pl~RJ)d>K(g;70puGEut)H(fU>dg(^^ z0lRLeI^-!8OD9TY6W6@gHKx$3We`e<#6Fi&1lxIxxmSh0qaz%}_H*#pTJ+NQeHj-2 zt=brnMj+d+`Pp-tMF6p}w5FS2YO{f?(^cryXUOA9CVt#@8ZYP3f>{AmHWpwD;0Ppzethx>lf%s536^U(}o zk|!R$*&A)~quGmbD20c87ntPoZ2F%9SzhA6va9BqQ~ik!QPI%t_bVsDlV7Ezd;Olx zS&oRGuP}7l0bCsZTUCO}Z1bJXex$R3L`^|1N{IFnA%TBsSH<419cjiO6ihAg3op8a zU$O(>lM=Rrk4iU?5h4=|X}M#TQu^WI@G|^N9CJXwWS?7{yZi@f{hNH;bujondpH_? zjKeuOArziAQF{Ld8N(Wgfjv1rnG42+^hG#{zz&7i2yZ##9NDkuBV@$iGyx!{OE zni6??I-dhvc}LM)Vj@`Y2>4wzeK4?LQ-07EO|@XJt}#L97R;NBj@(=qYgsyXt-mdx z80z~qo+<)7&nRVoXY_Oj11O*s2?4svn)F!kdZb_4bvG=&VC3kb(~&m7z7|6k`lVyE zt57~I+y9Og#=hWxl_c&<$3Ux7<>=)-zN)=nAySLe5n$fkRVRWal*{m#%ANKIny7m- zhwp@)v67;jUU3odn9d}}3s4{YXnb|*irV=H$=0o&%9r%B3wK+0QF*iFu*nUgJ&E=$ zd-~f3wD@q}@5?9a?}fPpr3NrRnQ=I@I?5kRb!0FOnF*<$(fH$uGJ|#}NaM?Lik^TD za|6QSRU}M{T8ezjFBX%Vw|6C;N$JYdr18*!HI7O?%X*S$8=#vihrB;aZiPv+`*T7B z%kruoX!W=bi`)ON)Kc8gh(m0gje- zV`f-?!MaDoc3NToTlE?PkS=4|eR}9hO7_w3&+ff#x@V7Yw&Tzwt+A96E($E>oEKE& zUAo`>b}_IH`)!3e+e`p;LyO;6>8MtBotTp>Qq+jYQoczK(MSR&Y7kFzjf-rmN2eT8 z1l3ffjzz8{)YmU7qePyl{#S~dj8Dkmdtr%ZHIU_X$bLnSmm+d~9%pj0uc5dz{*B54 z2+IfpdNgBK4H>h)tN2{g_-;H%O~7n@T{&_u-xrSpAtTe~Lg%U^FGa`eI1@}?LDpis zfmG#(?*~}-ELhU!E2Ekc^mV5IdQQP~^7GS^Lp;r|O30GJ6y$KB5X>;`k|ON*Hyky47#UMj<521S+qhGt$;^0oA(&F!09)l z(KF7op|6Z1ccx99^KJNN{ey;XG?M)7zo?i5IIazLf=IlexZF~79lK5-4#?c@~27S2bXcb0f` zV`RgTlQ(e+wS&s@!O;!15&o5FUCD4Y*b7#}&Oem7yz)B&SJkfY)7;x-&=hL|SUjBP zZME9u=PND%PmIw)tSJf2K|k1RP#UCiq{DbBxNz|4PpHV)Q2(rJ1u&NX{J&M}nl!Sv z%P_>!hc6YMbXYiy-i28LTaa?ZL%M?Es1dDWUJK#IC;MngSGyhGJ0&9N`t&Jv~n%wkDfWl zu|T^Njv7X(<&={oZ1!y8mV({q3mA(UJg@w#sCuYc<=`4_9Qh9F93aQWD)`P{Rk!#` z*6Wb3@qVWcAV$)nLEpnt3}_R;eRQl3mhuTp88~547IqJMm*tYW$~Ug)aiN7vKK^Up zLP7N*;_P-W;F?*hqW-rErpj`syY`a!WR`%X4zuZ6x~h!UBqYMflbPrN5?QL-slT}L 
zE|$3*a1sa{#&2!KL2J}qw+@qH;qTD-H|03mxO<;Bj-&Vqj_LB@t(ZI)lum)b$rYZA zj?~~Ech4F?0}^1rXUkuaNdCXeQZXqq=huoY=pF>YR^H{4Q`|DJpD;xSwR*Gz@B@IP~8DpLV6Zk6- z(Lu5y3b4IW)~NA$F@+^E4FJniPG?1qNL7|(9`lV;*vW1d_&&8tgC?_P&_~6#sZa0s zX_mzhMJPsbiAaQHpv?Z**Of6}r^Enb;%F7%y7|B8L_AY*{D2x36y~L}T@GNMJ_{#7 zM+7-P({g-1`KN-L<`E4$BaisG*{20cNN2Ohr@CDKSx&d~OL=+)%Rmn_RyDIgjmrnd zhA_TPPRY6DaIxUAKG<8&GnHWBE7S@~oLKF(=r39j!ci>A0ydxjuNdww`59mF=xe6k zxje?(26W6cY*aCaW|)u%8hIb7CjIIIg)MId*;fom_iALqb~|A=4xhOM-G!zID>4ec zhnw5^SLByXN;rk0U~%8~y$|$csTl*6&rarrxEZGuC7lvSP zw<)l2G}If(M!&m$C?W?NaYI|%{t7LB)DvF!LWNham6R0}%-|3ACavY}GlnWhaVahK zmkLpIbHOa`pt;gsjQS8ik$$Cjkxf(MV!t^hDyh^=wF+tqqHZ1n3H1Hht)g|kW1Lfx zF3NC3S1Co^>R>2W(^})y2LFnKH$`FPkz2*J2!36IhrwP8^TWWkmT2m_{$)#TY8>gV z4K+WaG@zgRg<)IJQdeSX=)PzesZJ9Yuf3~$XD)QIeAvvbz`ECaG%xV`mNEB`;%JUY zIKKYk9LhBA=s2g{6;p+K7HJ)hI43?)P2`^u-+O1ZvRTW7NGm2)V$?<^Dn|t~=z+>3 z$x?#&I2trQ9!L2YS(Nm=L);4(v0^COR&Ni2jU8P@WkD|2=I>Ova_t*xfyj)fm(;f@MmMbAAf_#%=yUInq zQS(~;t0PWq#^wBl->q=HrgAG7@47Xbk94X@M-qOz#$S~eM=&##T2Iod^;$QYT)ft> zoBU|^-_Yxk8#m(TqKS#IZBQn%X@P*+?(i#3c0@r>M6w<&%nwMf><^s2zxmZMc@)haV6RULX>6&P5B=-yDer1l1vR&`{+!7CW(%#d#8OY(i4xf?zrNIxp z(kusjJX>~ti9_z{IB0BI@N{~x#IyY41it3)uysu307n$6$X&U}=@{lKcPIV&j(O|D z>Uh%;)D4F6fFmiALSv3KcJta69e)`6^~OgWQxm3r6w(Gk0-}E{3ip9U8FK{#(HL$*t*mMbq`Te=nvPJD&Sv6$lqO4C$kEXZ^O7 zv(%VdvF^=WlFGtd!`vcqDcw)=Zm}r5o{)DuxrJ}Q8klgOrWic{uFuMMtg+#J z;o;bDAE)Kx^$mxK;ZiKs5K>mnIz{J9H(Y&DQo!ac(hbn}5>|JioAK7nq~pP=^j82< z6mXE(vpu)UFkEBukIbv7c4;T=SH6=9fWIh-Uc2Jn>VCi=0zuqT;m20481GuMuZ_vI zEcR&D_|XPDRZi93W!bcu4@A$mio~<^l`7w}E9JjCT#gbF=t^aO?hp(hkD=%+QFscV>>>-Q7Zv&r@Ghv zwL~W#uBfT|`9;6ttF+5&&2mOR%QG#IZjy|x0cjLSk?Vxzkk$nBKn?M7OGZLwZfVxH zbVNU7=n6+~jas?s^WM0ath!oz9h`H_=$s5=d(8{jJdVy?Cdd#qiYIOLB(_B+#OrPD z(a}6B2pM0KZh5@7^_Sn;S50o1r_9q{Kx-rjqo{jk=~WHIvySFXw$H}JUWI-`3tNWT zCd7qpuOD#fMsbe8j(N-D%M|IdzR*8v3%OCRpp^A;MF=~O+!(nz zw`5A6piKuJ$AxecCn8}PP2Jr1O(WqF6)O@kW^Iw3D|}s(>@Rk+iFkdUF690$)!}^# z>S8wn|D0Gi11Sfse;}1ut03FPusfB5YO=1Qr~IzBb*U-~aNE&~ 
zhfS7}XtKQEdA<@37R%pGEn(EvvrcY!Fbq(gS2H3(!7RO4TkC~75&`i^>dY z`XwkWWNg2N98oGNYZfT8^2Jpxo{#)af)>1IqL9Ce82xjWP8m!LrootlIq&b;@B9Av{o^0J z^UnJ|&;8ujeP8!=G3hlj)i^nUv>EgB4xZw3j(U3SrTu=MGdZuR41UF-k&TNRZ{I){I?Ot7M{m48L@1YcOU z%%#I8TQW>z_1ZGV+Z6sAAJ<@$Q-yarXo?Y(b@jBLF(wg1K5^llh)Gb2x3u&qv3ZdZW3U`3UQ%%yYfland7qm?r{#GrWG}@w(dp&817is)% z#Oi%HDJ8S`>+b1yr6;|dbog&FJVss`wJQRk&*Uv-GN^i{EWvdlzHulgDtYHsP48AC%qbd_9`PT+!i*?mg_W{1ymCjwp7dG&eh+ zhT76S$0V*mG-xIe-zEpHiq$U9+CC`+)(bQVSBWYr+b`!5v(EVgu9BVU$=Sj4uu0ei zxWK#>XaO&D>kKuM>&qJo6j)lH%ZF(em|g-9c&poAJnalbS3tVSEKR%DW;-EDu{99S zfIC>k`5$AfDqaWT#ll*bac~i9`gDH{2=3bg5W4&VQ~S_JDvuNV^6t00%L^l6=yhcr z;H141jY;XTv=Z7igaOlLo@jKJ%VyNOmlb7KCf90*O%FB7nlHvQVhW34&v~`KxPJ0= zg<(XJEct&H(@S7lDyc(P^Ko@<&-K-SrRxgN?j#Vig3^Xs7S{|qC$1?)x0G=ljx-F7 zFY-9C$4utCK%^H!HoV~L-!N#@uU}{V*L2rZNz=VC46e1F&87O^&So9b+0^5~V4Mu0 zp~ohaMyyy@BlUxIpRXH@&$%x(H!+3v_g6bM{cVNJf;bC=k+&um{J(K`PW`a-1MQX~ zdX2tL!|prWC^{}B`e77p0GL&lFHalZhs{9kA*$jn;W2s}Om=c=qJulDODB@OZ5=C) zJ}AQ6G{$}3bo9fw^pq=U4&w*k7yVWCxXfkr{44FNrkZ1>ZH42)vm(C#lvTqJrJ~~Y zu+*n%pr45Z#2}Y{RW{5kKib@*^2TT)y142M-SPot=VEk1`pVm}S$LzJ?+i4O7JKIC zbl7Wgp#9#sXR1%;s+9|%0(-n?Psdoe;!RkGVcz!jihQ!yy@iFB_sDaOQ#2ZEBtjG= zMoppOkXvzfvHSG3R@4*UD>i%?7vs-0QJbkWxikEH$;=(7M($Ad(DPScxPR?1Vm)96 z$ICYiybr9qOHZEM$=mEIV%F9636V9J2NTe**6ZMeNHWiarrbY6)`dIvgu$ULq57U} zmtB;frzTu2_w`n|pPqFOw3-QpygxWBzIZvXW7ufNS8}1w@|J|wTA8cmhcRItWSpm( zlo@)`^Os7{eF%um2&ixlW?P={2>F3cx5!5!AO(){zE$}FRuajo;h#hmb7;;)?_+{dnaghtodWr2Vtjk zlu#Ky7P+k4g!0e7@y%sWV1IGZgzc4;WNL6Pp|@x8lLL5wbrc$Nyrt^8*Osy+2XX3I`rAD@AN7x~{NehWteTw_1Me z$(iMCi`YbB@m4cJ99d%6jH9(p?(lCii_3@8%|G7o)nB-38d+1lpzMiv8h;%SMSOpI zIYCQ)QpYR1cUS!N3}AM2*l1Uco8!5|SrnOQP?5@uXjhNN={!7b4(UbS?2NP-rx!0= zVKM)9m`=;L(jv!G_9_PfZ&R?=kf8pHQ*6Z@XCi5L)8??0hgPgKQ;AzEdT-zGqBUhe z&1pYf6G^mF4f2)aw+zEgihr>9JyZQXu1;>d`JOKE99x9gK5hIH($DQ^Z>4+A!f16O zkmq5n-Kso#^J{Q^4QLU)5?i)>#~b1qX2!B(v_Q~LdpCWXu?$*mju--a=$VoMZB8rP z_wN1BGQlNXl#8lNGhKqVM$6pJN60*H>}oi57qcEG_;DkpjytjX0}D?xm!!UpyeAs$ 
z-ms6ZWu*)I5KU~;4QDr0itSmKj~$OPcoXbX7ss^_y~2%(i07o6rPro}SNnvOQpVzR8a|f=cF2cM8)EsID*!Q)*bJT6}v&t`NTDR_w2VW|a z-E^&&!Ct8nkovc=rXGmReFeNAXdbzRyZnkKbuCb2feN+~ktcN6x37h)LzSABx?R@3lL-NKJG8CGCvt$3?#Q%47 z(wxy7#vvbSZO(ycd>ahSe9E0S>RCE=BiL8*c>o9DX9eY(CJ%=03Rs>AMYkzm0g8pB zXzeJVL&$Br{o)C`6OX(%`fFTywE>5Wc$!}x8wPAStMeQ$^O`Jm=9Oi8Z)9 zsj%GsJTFVS@Vajr6eo~gfFzs9@i^70o!_gn?&a36(imlUyEqZJYs}@C@IfW(FeSUB zJFWer5ZNx21s40qUTS?dyYBo1T+WKj9Vf3kVx;%K-WO8m@P5hB?+ zLdV=KzAY5Bf$&3!fF@1~01F5OQi0A8MRjhi-RUPTo~D!>|I!9wo=E6VUcm8jVh7k} z3%B@J!etTA4lq&t1UoIxzbtc;>N>31A@xzHRio!=#t=iFs)$V^Oy;G=cE#`lFVqaY z;{V34MpeU*gwu5W=4PyHrWw_uZ<^bVgDLx;!3+XJdn!Sa1CjnAf8==`$o7lWiOd;f zQ~<4aAoDV0keNom#(h1>K>ocNZ525nt%fm&$^YYGN~#RtojkCf08FXrY09Co%VPk3 z0wWM{xE=2HibF@Plk<$8#bR&Z?v~JTCB{sst1AQJex!=`{5|~F`Vj)a-(wF@j96m) z5BM%ljBc>u2^q$d>`jFk2A>ybLB1Zu8lXvu9G>fZx-Dt77a~sr<4Fl5r8Ex2?jc2z zTGw(;_Q8Ay4<$f_sw7=TkIsfv;0tfDy2e5@rG*MP;x+R%!~24|pkr3AXefa%m!^jY zucR5bk7oU5B~r(|)ZE{3yS-*YlQlHLE-98q!c&x;JO^Yr8E(jOp+j)D_7w)Z$Mpd;PqAF~7 z3@z31vMaQL9w>x;QrWo51;WP7C4xXvH_zc=xRY%w<>!^&Y~R^PNOpqF-Blx3M~_N( z(&q#D*Jq4AzWG-rlY){UNyupoLw5_&G3hAWAXK>7^^&Jv()cf+W|670yjVxNR`zyu zv7t)$o|}UBSXvzd@C}CX?E;^)Dt~!T0er>R%4zluphP?_SFQsc8CwAuS`7mPWp9H- z&Ad<=8+ec^o7F?=i!b0SOEwa=oI_TX;$$~-Dj= zrHR=b+FvO4aO0n8F}0r)0gnNUHU>ErNKF4c%>}xDK~k)dyeiEHZj8FDvb5z2CLZ1M1FWUarFX05Cf(P2eg&YA`OR^PWN=vOl!=tH`>x&_(k zVfpBaWzHNqz;E;AZZ8!6R(4k>E^a(YLY!IV`jukNgyqYT0|xIO?+h9n>WxG+Td@m| zVx1zz=xsC3#=Qadhgz$Te;Rp--iLMDm@s6d2;A;xK)(0-&2q2>(zbWfQX}U;&%uj9 z%Z7m_MjlIwLw<$q2^(6P{J>O+cDa!2+N;w1tK}Z7Rh`R@A@HgUA$Oq{Xt4-N^`bieQ_f! 
zHXJkSkahj$T=(l;1>k47ap7C0Bh;?dKQi0AN|e%4+Gbo*LeEA9RjEI5UQ5#qI(r7q zcV*honvPt)qC;Mv7ZKBRA?>%aLrU%wN43JS#>Lq4Gd+RcjOr)V_?=&S-rn?8qDhO< z{^WJ*YEd5DHH|d(7dC#D0T(GL3>I<2=%0ijgtHlqC0L+YaeAW2i8wtmsYBZC0ech+ zMhozN5zA`wNQ+tiDrqlNUb5|M_3O2iUEN{EjXSekO6zaCpF28)26S~DQ}4qp-J@qP z^n%e|z_LzVnP$6IlQm7UOJX1EOh26K7*NfI6;>yA)u$9E5rjveAZ*S=94VT01 zNc`|J==vhj_Z@hw{}4`tQS8bN+&g6tb;VKJWbZ)iWnS-$bkamDL2R#v-d$rC7CcBp z(g;J+CoHlW;IgQ;sOz`A7T@|rJKiTZDY`y6$6Cevv< z3ZAfZyLbOL1Lka`W|;7);^jznJannOI%q%xT7ybFH%*dvNX1c!x{&|i8%35P9_ywk zQl4x&AGYPuz~$_wg)*rx1-V_Iy1I;xx`h}r#I#8^Uwi3a2xWLN;F9p3d!FeaaB5-# zXe{D?VwkBBn4c+{k!v&b9%tyjrtns_g6!+4s`$tMF$tRo4=se8oLFWa&&yiGPXCv3|v%(upPXu*$3Vol4 zS9Y+;1gN~dAOnm)+b3yWVJbJyuEezTUtUC;Br12NUS$%+FF>Of4wH+|=`bHW{c{;GYb_ zB?z;=CjUPYek*D< z2$=w6-7+qNv|dTlH{U!)up!vBoVf(t(6s!qfZSYzbE~TLjPM|!VMp4ANccP*4CTX2 zrMs7#;uByd7SAhT{lH-RNSZ;NG*pSZAAPU9wHL^pvE-&=Ww`UCiRIeYjeuC27CK-J zcp1wM>fq}U+0rys7DoLs*=4&c$%I#87qd(jhD~#>QYtT$0)CyOGI$&$L(#QHd9_(` z&^H&R;hK2;wWTxs=&1q zKS6BcAyXs2c2u*okD;CI!k7dFwlPH}AjeSrHsVe6@T4ySJuJR(%WWwsU88nc^(0Lq z$CeCt@t)*vLXNMX7e>3-Hc)W#f|EtrFnxBufp|2qW=Ar^06|-4t-Z(2<_@H9 zXY_yp-}g`CyUyS}?NYn6T9Wq_Am=vjTe*<-;^Q}cGrB!Gz0^ysg!AOiy{BLN2WqVi z(@bgXObv%4~!SR`6@2OR_yu!p_4Pp{EVXLa>??+G|3&}UVr6p@VPG+l%0OjT(Bu*4v z=rcj=855^F@-*ABg+0iV1Gf&7WV@&$Q75umkg|G46}ggj%@+o0KqukbrKf?}=!@@F z2?0&c4xQSChG3-cn$=)|f|T50uI$dZx&j7<3db5QNE@qaioKLrAgXFwztt1pGkwYJ z(-A8#;JOMrWqz$Q6?H&c?kA(^+am=+Z<7Sa;P3|I{tyGx+YVptnsJoLuh7B1e!*_8 z%h~{LPMxQKj0H1z^?Bx47HrH72r{rkQj{-o-Z&x=Q-qm88Zg$5jEh+EHLw%IF!M{7 z+FlqxOVw^bGmoAfdFiJ3aRyq{v=^hSW{mHJAFRs56oD=P|3B#RPbs*MtB;%VEYAXP z;=%@#G)OeHTmHa(71I-LxJ0G%>3nnZ=0^-C_E}^YbAYdWU5?FCx_8=b{X@!vgFJt@ znLoqRTC|E>{e$+e3b=q(8CU-RrB6Nk2)G}+XxPdz2OqqU$_9rtcbpP}t9YQjB`s>1mg3w-H9O=ZVin&Ac9i)KC(*maD|!heJ)paR$Tx|*4P{+)<{ye zLhW4Ek35SsXmh8mhW1d$KsF5cJ19FIn}**Vc+^Qf&m2F71X~@@`KRD8o-&1``Ry=n5_tbkB_*#apKTdDKZp{i@C%FeFuK ziA041Z&tz4Lx!A(j)h8^1gsUHz=rHBLU&#eZwdtyHw?B!ng2k{sE@TwK3-jy5bW}O zZ70-SJBCNn*MSum58i3UY&eolWet$X8gWH8(@(OUHdA>^$UZEUjw57zn|q;?ow=)A 
zM+5sn&}2q%nSMF0_VVg&Eic9*@;}L8m|JX+cxc-Ft_-CrMyuPD{&Gx}F_w>39##PBj)NC29Bg9cu2($Ieo>9=T=LY?Gq@)j?Rk^mEGj zw#`^6P8)!CN@Z+X$mk3U6E%x1fVVgmG72XT%5h^<4Q}-8@f38fiUJ5~UAeLcOSLmX9 z;y503A=t3ZQn@qEB`Bzdr=E0-sh%9i*;-KY_r}g>!b=_DZsv%C=*h|u`-yQ=z*MSd zYn9m7(c6_I*IWPtPopkE-0^Kb5j+LUdGiielq4C?7S#C76vbG4px>3 zOAmHHU-l_W3XK$O4G@`LxXsSCq)a0Y7A;t>)L>7fLXxBoKaki_B}pK4kdlY)QRN+= zY}Jxx!LpvhnUgdsSNRiRi%1$63~qgs+3S95OqHI!zSHDYm#UV4HcjlT@qMg>9R&yg7BPB~%3cDD{ zV zX~$Uz`GE(9B6vly%U{96ZU&!4C4)v2hR5cChn>hEWUvX3qK_k(WZ|0T9%4^A4@Q?O zATzf^v)~pXA{&2htu&IF$WvAq=mLa|LehDFU)W1R5$B_~7o&2Y0VTq5o@MFmB1He9 z&Zd{!1-_}_DxqetJQDcG2kLAO;?32@aCtP~Grh&Wyy-~rKVBIH&7MNig?30(YyD;O zg-3)laie+&GP(Fdu(QVVRjjurs0WQ5K0EHdo+1X=|!C_<{Q0^`?l}cwCyKTwfj--t^ zX!_&45v>!qJO*Un^0c7jxrj`tL2jA?a!3?IPsZ>eDc8tBN}_W!mb%sGa}YR=281lR zjXvS&KiEEH4CCY6FG`0IKc47W!MYK+Wgw_9bi+7uDjC!IzFJKTv3iYP1k;5gq5jKZ zcrLA+mdeBiX`PJ%lXSQ?3f;bc#AJz3ug%f1zvai+Xnbc~#W; z856eTE=vmLqZigf1|~r7W`HO1aG>rrIkCp?u^o2MBbN)Z|07`qFfcI~Hso z*mOtiQa}pO#6*X2-^Efn8V63O22U$?e@cT{Dne%IFgBTuF3xXT5JTzWP^qv~55bW0 z#=$rvsX3z-^ygFzmGUf37IIV=F5}2lR=$iGdOJJiH|G2D8sjbMQ^tBo?Av| z9+C;6uM2L21u0QNMPLg(;q4Zk7GIWK5no`oV0JJr-T)6wkav`QatJ{yx(g?$>sceJ zM^ljs6p{^A!vK%$A`ZqLj-NSXeN}gxgYRGjeF1}KYkeO})pa%et;h7TYpA^%a6XJ| zGLZpq1xT7C9o9t2enQZ8KJRb8HqvkqKMWh%jqzCQ4>rkll@NhNoQZtf?&))gZ4Ce_ z`7fs;Oe%S^butI}@64TLf>czi4$V-xui;)n{(OT4^_4!&g0lbPhh7yLzlxVH@)3WP z!2`3RksXjqAVCLMYmZWEohll^pqG`(!lYrp);ynG;c!JoQsHc*SPzFZCh?_Sl5E5c zOT9Z$^JiKFx=JnGB%WQ^i7&C9fr!+ZV71dc{nSumbwI;#PD5(h2faH5RV}?|*jg}D zFWg9aOH68se4B0g|EO%{fl=wmDJ<8ia0?nzp)mkwTBsVhf&p7cHPr19&_ zNOY8C4ct6?`0fqpdZ?4?b84x_qLG&5 z9V0QuVF%AWdPhcoPQADguy1zyy(@E5cT~%n-tVdC2YNi7qY5{TU<-<9)fKs=tn^$r z5Aj)5ZmA4 z*a=!}E#2+(J&R_Y5W;9J9{-6yrnV@&Gr>97xzE_bQ0+;}T=sQ?Lk4dOVC7eM=R=>x zGz-ssJq)CRa=wA)U0{NActbT70c6EA9jVTf{^JAj^Vb}Zoh$iAJXA@_mRBV zGSys`;y-!k_y@g>*787V4fvxkP|JzCRXfzvawruw@`Iedan2sb-^W$wO~pcT_wL+h z7%Q5pU6imll*X`=>JEFoyE5|!N>9oop8FQ<_W7!O*9y_Hf( zd$#2Xhpm3LrBK$o^bz4S>$yFny;f_d$~62RM+WUJ8K-YI-qRjf>nU8sNyISa)-&Kv 
zG9!=f@eeDD{hjAsc;t|foDh8}OZO%0`^H#XslR}H9nekU3ZanX0C_hN9Kcm}-|e{U z6%w9%8&ZG5f@KR?1(4ZQ*Q~w8*4lKtNabZNjj~@adM6MG)tSlaV|>WqTEq8GeKv6r zZr1Rk;NQN=^1U4yBPlhNRFPfZEDnqM$=o$*Crpjpy+}>$W`NT{L z=0$YmV%N2v=v4Ez{By`&*3sJHPo+uYs$R* zGlz{^N0zkcnf72^RUfS?H9!N}aMP6|mo>NgT{Nf|J*Bqk`Oe@O6V0+1_@9+lDW~>q zH+J2#9kXrrNrxSM!pbrEs24^Ri~%)~HSjZzKDX=9SRpClf6_Un53Yzh8j9K(1JKX) z>j4OtYv8b1pp&Q?CHiH?mlFe|bCxz>wa|+~=cmNr^5!LNK;aJ$$*z^yx+y~R7;S)bN4!>(Y(uW2UAA>Q&i1( z#n*ha+|>Q?pTAceNaR{EteaX%2`?IhR-$1_KSqpdqZ; zc=l^QwfoD()5JF~z@$@e>2CL{kgXk+PQnpJ7cT7HxjXg|P7m}Z?d#~kX<{~XoWIr! z(Q+hDE;>#%EGwWk;=Y4P9w*8iT)(I^=G|)ycpczsA*rvc zTiR%#XX&i{RZ;Z$=Y_5hTAdE7L>US2~ z?Pn_uq}iO5ds-h@MkQUZ4}!x_kNnO?tfyqRB5J4A(tK`$he#y0zil##`*Croa4p?H*l4 zw7l?w3;{d&J=(RcPc z2dWsZ?LN5%_^L*LFLU*=F_GXLKu@p@}9!74p&EasD$iL%(kKbJ(n?Z7>5Ho@< zOH4f_BZxpA_R{iHldG9_{KcT9iZa_SBiqJ>VB<^?)qa(@z3D`| z-_eo`KM*{~u3-HFaoaU76D0_A&AZohCiBC8%{hN;AVPK=sVv10?7e;D;LqiS&mkhC z);w1MFz`7lo$b*8d;DFXaPAcWI%2|KC|?c5gc~0^?Xq@+@&t$Z4}sx! zqK=Z%Ixk4)!sN0433&aha)J6?bJHElkzuET`gg~(_aQA&IBWy#4{S--eQmGRtyb&( z7rx`DdC`gOjyJ*sWvO8fc;#M3$~g-;u}T2UsEZBYwmd=+QI(%4I#M1Z-`$VwYTmCwg@J`KUWH1Fwo8Qfyq0jG4l55P*2jT z-rnnIwJrI3)p1r@|Eb5gotbvr-FOCB{VL=zMl7maplw#dL&*QsQ8o3<9I?1dcGYM`?!a?-D7S3A!*dOf{~!xGY^Wjh zggYO053*^j*|{b5T;`rzetrXQ;mRG;Z!gdJI%?XCcxV&_#$!A%9vx83rdn`>L@)xs zq8AF}%W|8Q%&M7GU#C7n(VQ$;!5C%llBZ#9?nhr+HzD}OSjuV*xxXSeS@~w_-F7?t z0GHZdemF)dSXLl6DIO{a&{R6}om(rk{qmxRTVfNccrjK%IzK3PdixY)9ryoW6O07N zPUbsUOK#a=Tk7`%{x&BocIC@lE(#3S5rh1R-J})6CzvyYU92;MA8rqM^;1q%OpKJG zMAwbflkPMx4LD(4yNoSmj?QC~Tp0BVFWc2Vt*dCxz0{*)G%x{kxs~&JsM$k|YUT@DH4(tIcG!V;|M-!0--(x=xR zEv;|ReDvPi#)m$3=#ax0O|Q-{oe;(tD2rZyZ{-i_zNRw|QabIT*}5t0G>2JCn-_{T zaf9{N=ja4+yGQkACrVXvyTUW)tM!FjYcePD7loMyp|9jZ>3p9Oc`r*ik8>8=i>`?C zAlXQh)o-#tX!3LA6q*$F`7TY5GmqS){K(u+{yhc=bZxv5iYdLzHp_l#{Bp3oj4`4fg9x~RqAsjK{8+Ui=8-4b1y?9{OJ-XHv za%-P?Q&ye%JFev(^^ojlSRXy?`@X~Hh#K*2GL3>2q6edI=Fn-+}(tqJI%cFU3HbHN{xbiBElL^}UEV{WlTILh?cVJ)!Z@l(DGZJ?l=z3tEl; zjGcUPbywal%4uE+5BX$-94I90?TpbykQb?!TKC-|ejh=wZscQ)bNnidT)Nn}0fH4=QzGH(qH 
z_WH@I&6E)1qWU(u)Ax3X`jcA>$1TQ)-$2qHaF>iVc_pQ-zZQARyyTIx|1%GiOG_L% zj_Qh%9G}Yb9R(%OC7|jsyuf!TYbZ8vzT&M`?CXAY2mf13iX!F*&X+I@VlfLDP?ve` znucPl0}1AAJ18Y3tMp{$wg_nFu+{?etbMHwQT38{X5uwHN+QLUA)Iu{Se+GnUDwH4 z3Y>=Kz?Lds)lIm3@=(COyOdJVLdd7_X4{vYuVmE5&L!%Ag#U7`F7E_K&SwhPR7__B z*ZGe+Hi;ZU5&~lY;GzLK&q#H}&u+yoCJLtY+VfD|l4m;9!Ir_p)c-8NoMVLYz#gF^ z@caWE2Y9^{M;u^|HI#H+XYp8kd8il|C&NN-_@z_Hl9`4h#{xRh5`mVExzR;-_EsV^ z*CorQyx{?>BF@#BJy6IouiGHeaTezK7f3MdaZvhO*AoYr+&#@#s znAmruIIg&dOJ%pLqAN8;mX8gpKpB=;j%=$HU%>`4l?I>?t^U0!A1^q+7z3_j7s@bi zkL0(7-E4SdnYq(b!Xing?#?xeX(uCgXQ|*_Y0j z-DtYNPtEe9noO)A_wl;bT(sM)yio5HJ)7PBRXIx3ofpBzZQ748dqTfW;9v=1629ar zWwZ~z&%Z!?Yq!&3ldsq;9vhU>fNr8&F4UUaRE-QuRzyIv9I-!n8gu%FtG+A#`aU2+ zb?DSqbqoeW20G#DyF(Ad^c>XS)L)b&`<>;Qe&JQ`zBiUH$%vooY{K^slpy_N%U|R( zXZ-q?55M9Uz9US6KJ4yOh&uMm732$Yh4M%FiNG@W{8)L>^-N|_13YxV3RF@B>q|3~Zl6+8($gcFX*RzFUOaTiiNK?ZeR}GCW&hv>euytJ-l{ zm3rRfR>cUh%%tQhuH@_25QcM*%~y2KzHd{Z8b_*|KcSgY?kkzyRE*5d?XLu*^K7S3 z#j@8m6^ITyDU#z@rzcCqb?`0xo)W;6ohWUS0wkwZ_9WQ?gdLP&wvwJ+%4b}gi z$~|O)4E_jXTX#SX=uHDc6Y;6{Fz&*uy5U2DW~A)GOg}>g5Y-JxNL7T(O<2S&zE<{! 
zy23WqeK**nnln=gT}p%P>;VnF>+X;!H{f;iv*v+eslK|NYpU+yb=s%0cs$G{j6hS_ zt$?awAavAda?j!P7i*v`6xhw^>!dRCa`?qyev|^wARFra_tbQicW=Tp zt6UEiK$pdC)4+&uc4B+0xJh4gW6p5TohE~Tq#n~laL|-YYBfe11tUjIO?EzNGm&m^ zGQUfIJDn5SdSxL-he`wP^q^{|yh^El)qf^jRQyAOLcoCWg$#L`0!dT#T50kqAm#;r z8}9*Q%v9yuH6v()1g%yz*pwTS-2D&~lW0PvH@s8k@Rg8+}Akh}&f!QtVXd`1WQ* z4rBq+7C$9j=06Cj+tnfSVl@Cg0VB^d${d1!oJ%`)dA5!&yo6aKa3&^KvxXcy#Od- zjF3fSkdVqQfF5fhbQQ6OhmV)T0xe!aB`1Cf{pKmOQaY5Mu@+hPL>kn~PI@nm8NIuOJJ;xOrr z7e}*U`1QWw@ZHHv$XT{E0?;y6%96XAEx@`9mwZz~1RLG0k%F0E@=VTev*~k6EBb9D zqAjN}e;#*)hkKK$ue~B7m3e*IybFj7-S=KmofN4K6dEnOy;6K2>{7A)I(v0Jmv#rW z1V>y`rP~7Ibt63Gl&{BgMfKTk1_{)vT^aT{L*^x;*A#<${Z;GfQ>IIQ(8=e9_4EGW zOdcA<(f$b-DG68pO7;GaIN4iV-UO#HzsBV~WxA*QrWgn(sdgNWtNST5GB< znAIc=`u0;V!4Jp~vHQe6`Sv8SZHv_IVM_=gN^KZKdryQ7t}QQGeGlYOjz}GE8*Xqd z&b;=Xxr2QSIm#3TRRBs%r2hcNs*^eF&6KwqpX8R71>*eL2}f2$n%h=(wgPVs-(C1c zA{eTX<$xyzpx#9#2RzdaVAL=dZ6SV?nL}+4Mnh@~za|sE%kDVMG6z>SsCw}wj75MB zN2$}ymhPY5|Er=lZ{c{S?q3zhZR*t%I|tLddbgShB!Tk-0ioKf-&y*9{<>qgwIN*p zWpo(#L)nI^zvZp)beeA3R|oQxx_VG*o&_ytEGA1HRo0)pi)!#99r|B4G>ijk`+?)T2cN&S|GVYc z9tqBn$QDDX_Omi2Q4u}z%*2wTM1dx|P}=d02z_=#)g-=9l3lX?{@!HAvTasv8>9Sv zd^KF{ya?lH3vA*D$gZB-+ni=6qWT_&bu51I7BFjNKh~_L#3H%M!?|WI=6GSK>2pzL>fXE5I14#@i~b z{4o`Jy&+toiu#DVB+S2q) z%9=l-E?kqBe5pQDVaxBD3WtXYMewlxCdFm>&IBOAS z090T$?TEbtWr=;IIP-HSWuFDeZwfn*2WJ$71kCB*Uz31(hQz;rZ_z+9TezUWe(oi` zBkh(iD}ohXZA0d%wx_BFg{e1J)%4W;5{ncRYG7)N zs8!4`&&0=I)mvffyl?9GKe*f1&tIoQdm>D^1_ej{?W<))i&n$~>`cQA0NgFc_{r?V z7#G7%;M!7lPH7}FMe_Eqz&2o+B2&T-zkd7i&e4;QB~GV8wt>h#qikSRf*oL|h<1}5 zlEIsPX!FzFhtaDe{s=;Z8LD&^t>-?h6N9lhKF0>l>;*1|iOlX#Uwh{djz1wpn2=rU>zH-; z21y$rDNm`P{HGIm4-z{r58E7DkZD2P_d)9omab_RX~jY+AOJqsW_fm;)dmE^9+c61?=5y`ayXK%$gbdP!F#H^%Klhjft3YA~v(M)z z;_X5vqN^6%g}J86w;|o~muo-~hwlrFzvLCjHc)dgdCKA;pF`oslguM9z+RBvv$0Gg znEv8+bXRGwHD*#jwA^t#I?GTvaQ4BKKFyirrKO$S94t*v;Bc zvHwz{>Ryqs$P~~eB4CD_5r(45+!(}e5FQy~9q`i=Ch0|zQN9m2F3abOmms%QxL9`X z(5zTVvV{4cM3`>S8T1^RP0TRN(_C0)tg?BmG%<{WHO1q=J3)SG-rt?= 
zCA}HG=K>MYqAVHETAN4l<%|Ezop3%~2$U``0AI?ZFZQ+nFD(PM^4fHkAM{#y=6~bv zov+XRr@OOyj_gUVWhoA-&LgN_23PONp#pnl4T{Xc^Tjg%hv#Sc=YE_t zvH0u%e1cz;;onKlE0&0VgWDlZe;(#(`GOcw1s)+`$o#`<6m5Ed2Y!XVSuB*`Na1jJ zGm#P&ohaue%aL?}{?|yw2@?wxE*s!l@Gn?dz@gX?o)OH3wXiG}(g$5bMni9hDGNcB zxl9>Vwy`Ync{3gGFj{JBb^-4ja~es)@ve;*gw%Giy+gN9onyd2-(jl9oSa@_j$p zLas*22RPu+1@=^7HX7riomP<0c0mNTiyH7?_|KD3-T;TZ68M*zWE!QG`Y4OhS~+mA z>ZEXh<^vA2iQw7TjEsJ}Yz?3XcYRSMeyB*@fXucfKO8334lK#KP<5|3pJ zGhHIG;Qe^f6*BEl#dEjT+I8^(iW7@@St_1p<{Gb=9zt z?Kkb+hPP&M)~Xhxx4y35@6|{=Ld@0me~>l_-fh~+SkN*2DOX&f)B?dZTI|{SlQBAtF|FDr=~N(44ZE4x};7pYTIUPp`q|U0Mx;2 zYjUnu9uN$l6;yIWul;?4j3Ulr}6pK$$1hLE{zOaEz1GR$|9^XRBHLHU^D#zuRA!c6I; zqcLcM&dLo$GI92Du-u$NOMAkBcogVZEn>sa21hULbCH+&rR67*JjlxsIOYE;(5d{z zZWch5LAFHSZc%F%nKcuogz(~vUCJV$m{xXM_q|{D?;%E`R1;;jS8ekDaLWm}m%RRB zLwlj};rp@~1$#LRY(*l5QD3ScWdZ!BveYK)2V)NM9-`%^6s5Lc9tp#sqDgz1id)F6 z`6y)k4p3tn2DGunbnzvWRSfP_Lx3E~xE65=K6yrSQP_lGlfbSB6|&N*b3-(}upp3e zh!+B{PNM5T^mV2V=>NkoM@K_e6ht*Ry|B%UB_(L!k{M*ox#8y#7&ZY(Ueya^^PpA!tdtP1W14}2p0fS5dcMjZK=Cxz)J+ zXFrcOuW>DzQ@%QP>ks^A)e+wOuS)LTBeVsfK9eZXSe{f(UV}JI%QUG_5k{Mxl4_JP zQ0qGGJXHYn{j5GvUJMKU8dD{S(=r(gfi8Xl?zj*=1OMesJW#kZai&jrpc74r3pEv zWG-axE13(K3n?nOqM{Na2?EOc%?v-T)xMd`1kKh6wO=aCMKm^%R0EjK#mu7SyO$*4aHxdjGxsmd6AK54fdEl zB}lR0E#6I2|8HF^Z@6QcFYz4Gx;RP5S-C}_ElUl_U1y$AXE^Ctp$N8(SOLhtF^5`? 
zHItDOU~AeuhesF6^jW8rSsgf|D(CC(=xxMJjP1v{5+W&7JRSDijp=9|E)Dk_284M& z>cNwCkRCZDdx$x?XO97bzHwE zjzxrPaV<7-#_I&sz^)cbvViujx5Hohm~pS0#Vm^bx#o5J9h&ufny!dP6_dUB3#CW1 zR7baPYNx{)hD|pehB#h?Z)0?uP@NKSw4Sk&W;3lcq z?t!Ekxpx-z1Mz1G@lXC*MpPj0zjaU{Htp1)xLn(cW^=2I&^l^}qufU6L(^HH!J>7V zaP!2HNQmEa@d>w=cwXXP5fpa=8JRp#^LQ4cp|#5Ak&^BGU%Y^4urVjiVlBQE#UO#b zQ-{hw2P3*g)SomNpJyA;Agg@vR`yE!y0u*a;~cTBqDiTp zpf6Qcyf66Ylqa_?;z+3S%j31F@?)bOV)RO@&inALpGRhzN0Rj|+GfoW9v#2|4r$VP zY%(}qm7(XGBKJRQ{v~99{gc|ant=o8(LkCjbvGO0gmYs&Sok%FVw-kPi z=%4XEJ(}xH;n~Zta42A^6%sZZle#>G1vh(@V*9^i{UdeoPQmE{m3)+w%C2?mOy@+d zH(2*D3@Jo6;~3{HG+1upA|07p_j?XCANu$XNZ7Q~I%`5AbgpK$)FhPnWt(a5 z-G6lRM=8#lpO&(3cy$yt9DW2fD0>RB z`8)$7lYMR1E}di0xfmYG6B2K&U=F)1H6`@M8y?k<$X2psARJyQaBRw48c5&Vy~cr! z=~Lv1bDs;I_|_xUTa+nAQ_l@cGwT~Hw}gTw_!+Iv8hEaqK1?mWLoK>UM9DPKa_Ss(2DhA{(Lr!Y>+l^j z*n-i$*CG2D2Gb;iYwT`Wlys$)d%$uBwaBQ--{+*yx%giAcOlKQ&%-mc3BPEAUpW>r z^18V-3`b>9a1pZjcvrcaM=0jTNg~FXh*!lsLv|nOkMt6R!X}tfkEe z?-ms5@Yk?W^2{zU5wJUmRh^lu`)AZFwfX$Nfu?IQn5(@mQyq;NmhoFuBmTMhD>&{? 
zYp59CYc`z0Y`~g)V8k7$OF>n)(ljgFPGRMm7Q;QeWU~ zV)QzytUSO4ubO_d%v>h;$JTyDdocJbGcR#xVSD{K-|cqVA0-j9mFAzaPgI@Lc|q5N zY5T7VP~*>|%abr}vc~qF?7rFx+Ni|r$ftS-mDFVa`9tVzjUIs2MJKnEugI+8wqJu|KTj6D>MT$wr|D1IDVIFaXc>qGqW%PR+X zon0qbt@a79Jt{@|E)deSJAe&BnfxKiW0mV#8M!^0UEa*$2Yjx`Ecr>6P)M>SzqQkc z+Q5M91|nti?)#`MIbC`Y{%INy^a2#SuGtq1=c0M67h8^=sW&G5NJ*{2kKXBwO&B@0 zU9fYrtyFEGbYpJvy0u7$=ucaX-)wH^gigVK`&jw5L|DwIU_;(H9 z*%lbqEm*DOP9)FDKTF+(R^3AC_NDT8lCJo$Wrb|)V_!)UZ+^GYhcY)5nrxl2-{uTU3w(3BMWY6G@_{-Z1(dpZn)An zHMFn2?HzXOXzy~Ad0ifx0stgV85Z>+1-iBLmvf7H`S8sneTSV5qwVfr_J1KFv$bJ_ zi~I~W`HV=p1C#P$G4#NxT71bCjaGX`28m$9URZvB(8BElHWVnu9^y8ni{*owaY*{} zI9XIxoLVzft+THsI6n+xpNO z1_!@b)rEzn-OSmyP^OYMjprTkQq^m%y?H2HEpgyZ z{*?QHl4T8Bjg@u(tvjJn=Y2k6>j{?C8m*vv?iNiFZxASr{i<)*PUf^NFZV$&tsG6g zGJ#gb>B;4Tqi!x)es0>T_dZZ_)KZeZmDN3cHJ)v(X?nxiM04+z$}Kko&B%bV=cQ#c zoY)fD5*Gsjqm13vO3SdU;r&PI#;f)GDJ*VUx$;x=M^1(Y<1v9SY4TnXIXm8+ zAUDych@4-Dnqlt@ki?*l{Vbq%nDI*NsD7wSlGkJma!Ng3J^$D$?;}~aK0ENLayg7v zj3SXnT9=bo^qwj+jvXQ)Lg*U@8Xg!2dmPO=Js~d8P$Y$u;LWqy_jJ*_wJsdBK2aR|BjX?X}1q`3dc$|sn zYpq%oEpdchBR)saGzOzUlJU!-jw{yYs4f7MAIE%qHHOP7&ch#t!}b`pw%!A+jkw8u z`)p7CQLSu4Q;!BQGBTr?U@MDXE2-&oRK6NJWxit74FtM6RN#4BZ|jV)af~t|+2Tp; z&(LAt46i$-7g~@`LGMQT{A6K;L1Rjp5oU9M4${=?;itFQjb^nnhMb}3#J~j+7?IkX z$ACEXfM5MX_F>Qd_?3fktk3xHSW(38d#Xmj-QZ@HN&P*q!XG zta%6VqfmROe!`6_8Sa_w%EB+GH#2d7@O7njnIYpS!OOM^VkLeRos8QBf0?oz+*3yp zY(cC`FehH9F$nYku|lK{Sz!Mk@&6>)ssD-!2)Bt)Y9{>6AIgsFeK(HEH`(H-!`j6r zv_#w?91B3Ye}t;$(74-iul3}+@$$7^eA-d9P_*d)J-HzKX$A507$ z|JJ&PYqULk>qxIl?;n35be2DI?m~w%5-DwDJ*0QhEa~Bku#RWF@o*bCDOUYtlB@!F ztM30Kz${T*>p;{tQSNJ^99NcVuHj1mktB8`SQ1=%U(deHTKQysU3xstgZqL7>(Y&2^ZvTqvIHod_Vff1lmhR+uWTGl(@wiBf=Xr}oO*X|0lDpn;A zl8}FfV<#N2_Qv}iZpms9YU1Z6Gj3*D$@q~79dE<8^&V&a-U5dn{lzcWzJ?b$o-%07 zSY0vwm~pw}eQ2PXum>=bysQKF>^NwTz(ZlqQqkHR2&9PG z&QXFM&2G|$7!v&K8s7SQB7yAc#>9x8pnNZ-K#`O%rT*Qu2k~C%U{>IK!N+Cs`l4QS z(P{Kgi8kUj+F}GXsI5w{HTTp+%$Y;mb*MLOZ6Y(kBPkd%{_TI~|3Tv@_#u&K8IMK| zlY7tZvEnl6=`o9FXWP?}xpMEi14X`eT~X!dDsrPeOM#3BR-Kw}u-)imd?G5d5OccL 
zEuOT3OT;HaX?j{?S^T|YEjDI`;PHg5v+-MoiPninhJ3Pej8-@ZKU_uL4*oZoV&feF9~aA_ z`(kQi{^~)XXtH>LZ}6K-4x|ZiBbM00tyFQ>neuWYZpL+mEDn0`-l#@d_|NR#*@Gyz zlw|+~_L?i*Gumdjet~Xa>iY8mR4FdGt3X>eBZGD__=x-n%g3=(F(@>{5CS>v;7u3v z6ks$Lt1Vj=!f3=ZYp!pSGx|C>5T`?6sASHMLtj94tg-_TCVfBbKW5H9~4;&{g-kG z&|91BQQf64<2SKxSLm&Nr}Wkh`~w6QB4PxGpv0%!{j(Q}15tr7+5S?Oa)gQW}}@1O*q ztI&hm%F64?GrUy_@TDb*ktJr4=`$SyV)i0$4AzZxsw1DpS1>R)Z<^Fe?(%L`)Ayi8hIiy-vs9il9xr~{;Gie?;iar_U@~$Udkut zjuN|((KAZtO@;2d$)yje*J=b9$vLd{g9HsA_1tq^kp%*%)`#{&(z{^tyl!f5rY$SCsflB7WX+LG`%mhag9B?`O1_g0`7hL%<-s+Zd%B=C@;uh_nU&l-0gyyarzkqFP)4B@t`^+mPqj}$%~(nv^V{Y7^iyZ*1lf01%l=#U z<*x`B0&8re$KL~|!cx_sCJ~Qz<1!23oTZp_kcZ$tF*npaItr;zNzyunUm0%;#91g`Leu3$Kyg}MNPQVxIuoY; zwRQ~=xDDim-L`8M+(sD@6b!aHMc_hhwP?snI7!A}bn!BLb!LBL{}0oN8khtkhx|jr z&jQ5#der{z`i0ixcgu5TBklL;Iv-%U0A3E{`C+EwZN!I~WQx3YQXHr7K^sbfO=gx% zz3k;K`b0olyY@lprFP>0W$F6>xVKJ$XV7%vu@Pu}^rrX^xvHy(4rUd;Dlm?r=vhth zM~QWe;L%RpwFI=JWtRM)F=?T)Y;h`azFqn6nA(j|&Z@ob5#EtRW-$u6L4aM}gK88C zmtUwSVwWXR7MqxPhpwCPm2vn8i2yS+8nAI*qWAPw$(x$M#1hjSX27)?UTfc)0)JGX zY1uUs@mHa{)=4gN`I7e@lk%2u3f?6zdy%hOkbQh^0wOv$!#Y|&VcI3&%*eI119cz> z`vwP#b$MF6hH)vF$YoGQTD!{69hv#&RSYP)epf<1>j$=?9#E>IS zJ9lwaCjkI>Ry#>TRLWrMeLit%xI{A5uM?H17rRgo%3S<$9%GzYAfhg%+(%6lfg^eL z^VOa_Yz^;<5pFOkE`4=Z;$hBG&>5DIN^_>u+nT$*o$p=JVIX))Jmzlz(YA$%o_ttX ztkA!c*EQ1C5@AIWdmm|>>_nB-Bv!O+W6LsA1n6CTb~Wf^bIkbjEY<6RIKN|oQI%-5 zu`f6Fp^hpx%YW)ATKqgIVY>pHhIZeLT`+x_pdSCj6y*X=J>h?RsGYY%M`1fNe$29t zy4+c>4Uugd0~*M-O@hS92{umOP~HAzmlcvdB=cvED%8rmkpX>HZCx6wD;$Q*on^N@ zf^8IG2PSc9v9DUvaI4xI;L>CUKv~sK(P7~>qWXs z6fsb^*VE=0>>Ds6Qj%)deyU49)eVXbNF&TqmU!&>w=>!MafM=}h@&()(f;vyjHNHu zYW16jK_nFj;~z%{@4AWDpU|pEw1&MI1D6jW6Juv0BHLz7{7h_Tfq%|Q%hI1g@s20& zj}JtTk0_zio$6kJMJfTum9fj?bTYdyp+JtYM>4CHJOB32TUx=ZF*D};=^6H9##pO- zp!~*l{GkV6OowOWO7d108$XLEqs-Y?efy+{FTafMB%i;N$$de{ubiS()bDKL%t?tV z0?p1DpCIX~rk40L@MOz%I=la<_M660&_Jjy{L>lni^>m36NrcPNtoIlJTb&>YRo|@)?Y^E1;Z8@;9{JLBtqNSMaB_@}l;TcXj-@ zR?@9tC%Hv*ft;at{?QFQVgpL3BKAHn?}-g&z*xSm4BU>yPYq2y81V#n>HPxt6SztG 
z;%*gce_LR|V0UTowK_pzWv=`K&Vs$KTPNNiX*s7sUGBbh_AINo!K`!Oe;HyaA$B)e z%EogQDd$KLK|d%ND{EkohU#xD*^we=K7f6o_AhQB9T+xfn93r>Y(iEI(&Z+T9AYPY zcc^5qDGt2L%-l`k*pJ(3IsmZ0p&=;q0JtSLQb9TXQ4w6$axZ9OVjSEIY79cwz4>^v zc~HKQF`GOu;6;>aLZ=OGIgBe(d@}bGuK*iB9E!NI>~7b?lR=<&3)^ZQBU|M%m<(`N zx=|l%ynzE0CN|GXBBO%~kF0{nwY|qa$f?a-*pGV5qlUImwJ_hWDBm#GNL&V43=Ww& z24U<{+b$BGY{0ii^jL6=5XpvitTjx13Sdq$6>{L?{HS-k4enD+3uSb9fQjLVk2|={`&%Lw0rft@_ou&y${(C3IgsZ-a_r&p!qY{LC+_bJQQm0TXc%zuVgFR%RkjL; zZj1-M31C*UP=0Gw=t)sd{L$rnQ)6fGdbR*{e6S_BQ#-;bte4pYw>HPm$yj`{iQN0v z>9aQvC7xBFol$nUW_M14b&{-H+vEH~g~+{?5Wg(h3G9p7+Si}1*t3uESRle`)vexVQyha^zM;eG{hu_z5>1@&#~HS1ot%5j{>Dp zj@IIAki+x-IzekdQ(DPx7eY--U3OPguCsREw^*b4omOB1)m|Kz+a+M61H*gD4D%sw zJNC_@zM;kkoB>7fOKCV-R%&P!U|ad*SDwthP!+xZ4`JYSU~(ZnR9_`qE7f<${d zB?;d6mC7P?Pu4J>IxHz^Yc;rl>q4F)%#2)n zm{vD@(_j~+x@yA9?H2Y%soFz=e!zaTsUDD}sj`iJ7rdJ>-O`Uu1M#PBnwuZCAJFT( zk}IqAO7ek$9%QDFgJTPG0FaIVIDH^GqxG--o7*uqRS+2%M0$j1p9Y!kW;(3- z_td_R{eS$vQMz5p%v3VrIw>4m;of%3$hDdA1Z;~|3QNlH1Dz%kny;fkxCtf7mnq(S z=hMnWirM?{)w4+-rf=d5KJgpW-IG8e1T07zzFVJo?}gn4I{nQDscaotD^nNHs4tbd zeJ>EqF`l4aQlJiO-qd^|&gt%GI>T!W}&uCN{Hv5%-(3%P&ADv#-+oQ0`|jc1EZEk z$mZK49Xe@wP$>i`IJ`m)Wg`$NQE1=bVgh2!o)BV&>Rox;5bYQj zU5c9cUI#u?-VJ>M8=^G0aHPZPEu%ugPJla;F>=*Ba9DBhG)3-*rq8pgsKq!wGld1< z9q)UdRF!k6m9O7Q+N(&2o8?N(V&s@zOUx^$fBvkDF_>OkW)ku>>UOkbI*#lZdYODN zFBmd{DYKB$z6IO4kYiB_i~J}kE}UppYU5mBnr~yXMH6qE*Z`VT#D%@_dTGT(fD)tV zy$7Q!b*%Rv-BZpWaADQ?g~T5M_SR(f10N8(2C%PWw4kK}-wJY(I5~oi?-9v12*;1T z4lS2Kg#0)eSn_&u#R>>Pf58`n4vr7;9m6H5 zE;+k^u>syT10!JZ=b#iip{6Lz??@$apFumEXuW7pL26tIJwn-sp2Q`rvg#n=2!2nP z7?lz>FALzWReb$)2#*qnVd_^)Mm`8}a*#2p_TjdIegk|3%iUv_hXlE0D zbPd==x0F~Oo&WDtdtc@XM+UUrV2=gAse7CJKLLzoA@QMrHshP>kk`HQhN12illZW! 
z-k-~k=r`ewJxBHoF2P_bk8oS1jOl`#(5STr?NL22+idL6T;Ey+{-Cr{%o3n+;(o-x zgN188G6Jd}wOw-Y&?#yrmtjO<3*oB)%X^8g@?0$BFZMPAz2i&rciWu>jAHkPXWA;C zgvH>=GK}(NxSAv^t_P6`jFo8C3R|xIUyUif%X8x};u&H_>2!0@X;n_!3FD5%5rtfu z_b{Z6R6=A9!FdPBoLlP`&cKgWm8bQ){ggEH3JSrX|44zL>1@=yj4DA|myFoCzHCQk zJ4S;taHL_byBf$1ZyPz=y&yDi6!zhcBpl{<&MUhfz2xwvGU@}A%75l$0M!Y z;1}cNh6&pw0n$j__weS{wLcZestLUbR-p-m+fPq~r2GnnQY#NPbIr&W0h1 zJ~mHWR<++D;pp-EPV6t`&{vZUly`ilSo2{fJzA+D3Y(sw+syW=U%UOjEF|F(6tw^7 zUgH~#O)&lbhehzTuVDcYz9Fv)dy!o7c-C+m?eq}hbXDU+dAI7?+slMVnks20qb}0h zbtu^2{Y#|5kM>Bs3s)wS#Unc6mp3lTJ~9%_|CepjZ#K<3x|R^Nn26WuH|eQ3`!PpX zXO1v+uh=<~2A0Fto4u}{U%U?lakrK-+DQ*DRTn;x^(p5VP6t-Pitu82iXf{p7PqBh z4`I)vIEky6TwavinHb9CA-hdXEaG3)I)xQ)sy=k~=8mqzO4K217T>S|m4V><5>v;c z@X27smH~D}1SucIyVWWSea>h-4ZZibkV7v$W5<$nuL6Zu-L`wcd$BQ2lNdH%2&*~G z#)^q!pV)sMQc=(QJM2EV+04 zg)>tXk)RoW^B4cWb$i~sR9#x4FV?3W$SMc1i*q>SIEki%0&$h3c^TuBs%#i+t*pHH z*b67WViiI>n?`C0^Xi1bS0baKoUt6tQlcHjZ0NsrdkaBqJp)rJO zzVc>d{sl&02EvrQ)k*R1SE#p0y#8#9Dx7 zA$-AJ5v_8hf+3DjseXx?)A#P;R~+T@ML3`i6v!iQqK`yq^au>VSx8lw=-TIX7Y?>7 zP&*c*QRu~elmC|?^nX+Sng?K#N|+cl@}h;;A~n}5fB}LRy@^;;pvt||BoKn@{VvBW z@A^~LUI&7H-sT=TMVNtd0f*#G5-EZ4^-hnl=_E# zg;2t==IdVMqv(Bm8&Kb?NwIrF!j6H0>=oUL8k2a44Y>L*{I%w;50LJ?A1*( z>@|Ppc_a|zrGCJF#;Yto85`}JhY0n{fWhR-8S@#cyAb;!7%$AEhB-oZ!JkCfvZXEk z{Uc#EDuDHbn2V%=Sot`ue1(;gLFY*2INxE#c;0E(O0575DGvJUszeX#8CBp(=4)UF z#J~Dw81xbHQJ`HU(!L;3kep((7`6xD4E)A+0Wlyek7o2YY=G~l`nC-&VE4eRB8Fs^DPYKZqV;v@LiX2zt6W8vwTh{O@JidGWIZmYQ=B z7})33bwQs^w<`Bb0^Xb?2XDI-mw*VYs{7vKw<}@+L`EK7PC1XgML)6fjh75l4Ia}j z#NFDNzA84J(YS-!v|nLZ zl1Qf$vW~c=p_1!wi?31ksVrA{8y)h{+Fz$BCf}$;d37ebas2XS<8+M6V8zR-$u#W! zzl4NejVYjjMa+-1sD1GLHCc(2`b)1+4Q^2(q0*$6H*Xi^6RR@L;Ik2)G6#p&wD#5? 
zhQxW<9p--=ii;qiS@@O1zQ~e+4>Of6`7Mt&nwn_MWr|R{X47$pf4K8u=54e-`4J!% zmRR0`-$`TQYnx=U1e9P4=epo1>8G2}8^18BHOmUDyWec4-HB(%1@Kd4*hRhK%GlQ` z9czfc6)V=u%EX55LPVZfXi9q2GT78aHf8}A>~^V_ig+~Y51cjJC}PbAPs^od&y^Za zr|0kW9NZdkZ7KZO#sAiIimySwrGmvoEq-=bQm!4*w2HMJ$-C-(eM+|gJVIp?KR*2q zz2=kv#FyWjedOF0Ww>>8%+p`{moJ{F__{jV!~N#VzUn_-&FcO=YDpvEmY8Zs$O;;G z>+$Lb_XT#n!A`R%xZQAy?rM9(zlQYbXlAG(c?@j9qbD?}iqGs|fcgiuI*A0H|SGQNO z5=7dm&Td?6^-7xAXk=oT?=M6i)mF{E%qMH5bzKo00#Y52_BGZq>$|<0U}zD2eXQz< zMBR1PTkJn+7+%{`%2-!!S>si8InaF6v)gH_23NwS+gA2<%tCN|K;>Xcw#_aV=bQzlk-T zQ8vp%LiQ_Ib>{`vx_Vu;_EXo_ZM?0WZn818mMA>NOqjq+zGBqxy1}ZCR~#pjvwAeJBJ-2FsktrLu8{P4HJ~gayw(BT#7R zy|&Aru2_kWAv^Kml!6K0uF!xE+@zG}du@(?DFfTqt+e8NWQloa#^&M@o*gxyftu=~B0Z#&PG8avGugaC%h=_d?h8`i%$Tj+i@afvbYA z#3?5!JDUZ?kN!O)JXu5(VdD&$Y(vOSLf+U|D@V4vThn|=DFO5~>i6pv z;K*mp1>l3Px=H#_=z;vT@!K9W)e5_Nj|miZ?oxL_`UmRbm!wo?Y}n!?ucCt55z%jQ zfM%W*>20~{qY)BvFk=k*3=%YoAAEe`B864)^FY<)-(w1q=yfyVh2sC~d+6Jmopmgj z5{8n55@V%y$FywT&D}$M{Q30FNW#HeGJ_0YNEj-X&48}zv+}MOKh`w+b8XeEmkG~u znsnb3M`zgvKUP-0HX>*byhp6)BMqiSv>#h{DWAaKleMeA%U4qnzbswP{$#e5iyaYP zM@xUgLg=KLM*$aC*B6l&{!mf1P*zH`KTQ29`C%Hb0VFN91zun6e&bjFIL3&>*DdC- zDoVdg0-KX$ZD_P@hQY%MQ*TZ<8}DxnR(m@C1e-d=v701d!5k);nZ(GjmOCFa3Cyy|fc>Y$@lLRb{u>hr__-N=$ z&nz$P)GYBXPJ>>-o(m$Vfhzw#`hp7CM<0mI8gV3xCBjz0&uh&# zR=Q-$dY)$M1(~{cpkmuv3`dpE^TLY}zl@yEoe9T}7gHwRlGPqM6g{bG8mLHf5}|%< z1+`Y&2HYm}kw#)W0)eHpk9hy^?_xH(BJmk0yZ(9b`x7f<`i~KP31+|>emjMyMLHQ% zf(4cw8j6$G)-ywjxq4ZvNinw$XD0nUPLNFeU~7I4Gm_X#{}tJJoN#*JNz3W=Y~e$i^jpWKKV5th|G47{90(6{Y5 zD9K5$UDgBwt}7$E!A0-T5mX&WT%RYwyvHuJl4@seRB=P<#APM3@1CFT{+jKY+V1DI z0!*!~LZUykY*uW=QsDJ$c>s1_eAfsnfc8K`tI4lslm1zo`TL3$`_+RD1{)3T(5@Lj zls0uR)p5T|Og!`2r}pu~Qkzi~UPR{Cv96RifwJ3d0}kD*MCB*3%Hziw$KgTB^h-?L zT08T$n>%J>tJVS1gH~InXQEt4$Z1xi~&$27@LHAQ342&^TRCa!HfO%r5Vk3pXy!AIy(2` z7r!*qHB$TDlBZ;=`s>rkfa7|RUaPJ7pra@)uz8Q}OxVMmdG7lgDO#tW9!g-_wJ%r* zFyEe)nQCiQc3$iQNT#N@XLRHrN;L@9+w?pC8T72%N*ge&8-Q9K2fwGzw8p8^Gr_w;> zUg_;5Z`Xt#Ll}f5nhisfzyw>#RfEN?@`<~)Zh;oRUVr-C%+pq?u;OriiHK!?g$%eb 
z!QjB)dfTre&Cgs1SP8rK#^aFX)IscS^PF% z?xDmCI`21*A%!EXBa`MO7jdQntSNts!Pd{(<)LuCuwQ38T(~y#!6X=*3!W!)Pf0om$Il0w?X3n zP1orACYuSzGZhckR?A|MqtJYglt;58(u!`_y}6nR^jdTR1yBDH=;iQJ;p0pm%~2>} zjGZp`+{&;$w)(9#k=D+&*LYAa&n-Sm+&U=zECqFR{W6{ZCYP!>$7_C8$@Uh~Rg8Bk z(j$DTdIE|)`eJ`q*hbK}mUfN&&lqAFHp)8C!!oP!ZcNXk`Irn_}6|F=#{8Eo2GrLLsq z);9f(3wVC|Lg!7KVXcn8fv`XP7Y!Am!elKH{?(6v6rrU^5V&{wczy=WdBR?Pi?a3p z9-1P<=pfGA`o{pY2&=$*#je_eAld1)D5zn6a~>^}2daZcG{ zty5r`;tXTC*(rLp2~!F5R{^nq1RBaU$mie)nODTJ?9nuFn_Gd`m+RIGx^p^CA;!uQ zK0u5n%U9?~K^CttJ=(hjfis)!DckJY|UNWv25d$D0wfon(mI z@E(;F__xsBVh##~rlQ!)*WElOa#;V;pMQWQw7*HIS^*g#ywmkWn+~^c@$~Y#zJ1{w zau&-0mnG4Fo%`RqA4+ETo*)@lLttIO70~hPmwJu1rQKi34>`Ev=G+|?Q&U&xylS~c zrWtyi)5a!;;RsW$E&Cke3y71fd*ZJsXPbEO8#9t`msM&r;dwvcY1KlW{YHsZQeTg8 zv3M1Q0crsE&+|3rMk7zZa{iQB4YZFu!UVY5#*_2hnZ?A%l(1RHv(M{!4A9kU2>XJ( zshfsyQGWyggBb-(e8yc^*3OpxB&4;*!<0|8M_Mj;ouBKG#(*mUHPX7KoikH4VLWUu zn!H})6vmB~g-n=xI9YkNYMg6nDvcKH89D%saqt{TqFpm%eS9z>er+e82ge7LP>Yz` z>(jdtf(&V!phfc$4@)V z&(GK3UG{dKvjbC$S|r}5dY3l7Y0|6qlG3=cWi+eS)_;x@>di3eA27HM#0;O0U3&M4@kjepsB3pU+<_i^32 z43LvXcmcbz8;f53)&7~lHb$}UVNM0<{l(<8=^C2TFpDOi6vvBRI5F%dQQ$3RFmLA? zUIi?bFwr~L2#Efj=Ld>-sF>oZQ0C!zj83fAG5^}b=^#((n9}4nec`vByV!5I7TZLK zSQC#{fV)>t)uXl9YQGT5)zF`vF zZTXJkP%{@SE;CZMFu4*WS0^$e8#BzNpcev}5z1WR+JkkxXY?GV;7MX~b`5&WXgT!A zqYYEKl$O`tk4{#~0=QJ8;dz@nfcHMoYgj27o_~iYPF(tPKa2GxK)Nb(pY%f>o89Qu z5|H$M5ICD(vCeO7bvy*+({j25&cCFW2l!M2i919qkgboe_-doQEOrX&r;;tTb?1N+ zaQjNd1@>wcpLQ>Sxpax#eAW_CU$=*bM&MBlDT8g%H_4kIgbWYS9VWc@Dm1JO?e2|~ zcK`1_PDn|>*_y+sl;a{SX8p?9QzxxDuT;4*)QYt?;hHQ{gKKheDW4Wb8LPDzch%Gn zGdL1c3m#TR*#@uzcs%)WNwW2kWi=Y!5#&RCiB%&`4RiIdli!hg$4-#5tGV)yQVul&WgD=J|9W}x$&{!40@czVj#btDiffK4&)>J$zo;R4 ztx9GXurt$RXvH^!o&rkg9@8lbUBpw!I@C9!fS8YiDeOL7Yf^_5fzZpWN=}94H(Km z|G<|}G`?Y}5qi?Q50nHu!HUm>J7Ed9LqmsrAc7T4{3v>T6?{@LFn<>t)mJ<}aRH~?*D?7);E)brvG-~Zp+kB)Vp>yA0)?*{H@#39w){YMHICpp%=ASi4VHbi2XM35t4@hDgEBCA7j&a(G zSfR@kbU_&%F0w2GwTpj4y{`GKPQ10;o17CP$QR!^l!|&TZ@kFAdCQ!?QGR1QByNd! 
zYoF*j@>~@VfXPxG-Qqw^%Gj=q|55wwG;kB(fU_bex19vue8?k#Hj*;O!V~xX8Nbcb z`x$2~hYznTVjn*Jot^romRazKG}a^M*eiv!86uT9@0l&og+_=%UfUX#>oebaZ&Oig z1Wy?W8@|zvnn#`j_;H*@v2&b=BW0`tL0Q;~%Qq)}ig*ctc2u4+jW_AI#Q1)u{GW^UCvvs9$XBMO6PUR~d^w zefVCDP>86DV1a5~s0wdLdznTV{kHP=+2mz(i`)r#;;S&|nGAe;I%6q8tKF&>rOmHJ zX)3^XubHSVKinBMY8xsuS+%3zsoX%(oV(~3#=J5)+*oF6jB!<7f-jWK!$S{;rl7)q za6Js+s;$Ob1DT$@XZ1IME$M!%HP-Iiv2NaU>oBS+6ebeP!S3r!K)oJ&cH#G92yj4} zFHb-+8Sxe6T4L}YIkR9sKW>+a(B!a|V?OpDBfMDU@-U-{h~(sJA7Yh1J7{`7N+{LI zZHIbMa%3)KvMYc2Uv8w#@(!5HQ5H;;3^kbh8LkkYocCgh!Cl0q z&EyKXqscae&GR=nXFoVzlP;lI{;c6(Xv4JQ0JT z5|pSOjOM$=23+** zyNx`yiu_IcgO%h0FTDF`;eDrMi7ial|f_=*2@#G>s%J5Pz>Whe?ge4C)6 zjuNlU?os66)z*}4?vH0n{oj1p+APCS82%$kcFCNbNi{pmrW zOS!Ar(Q3*kkeY#S`n2-m=#L4KPq?lcf{aTKPNWuoTsd+f$$E~hVrAE26Nf0jGB%oa zPam`urB8)S1m1JzUjdF>L^Op*^y_JBu`>U+cFk^n66Q2E^V9H^X8N_exKqRA+QSU; zjms}Y{m04w7XA~U`N;k5hb!i9+-i+gmyb-PdVX^c6EGY+l!kg1LqUnI@zGJORY|;n zHzIn&^2nHL()}OT@W!^kIHM70XS)`YLi+r`a^}ZJ2_^BrMc&IeE`p<1TBuZVie8yX zF$ZT_vB98mm14mWE{_Zl1zrQs`{!Fd?q2D=T94Rwu5DJ?2zMvQG zlO`El|D~L1HuAQcoMoo6T;pIO+eOD^%^8MHY(lP@6k^1Qgh?oF#Z5@YmH{Ig>dd?;CgiYGbu)T&TKfAfJ;M|klI}pjQCKj_nLQ7s8(HT0 zG}~OePn9Fkpi@Q>#ap|Mt#UG~$RxWRK?r^t*6!~edEL!J2XMV8l2m~uC)J!u%?Nyj zI4A>F46yCKcVFZ|SrLc-Ti4*y-oICgHk0KeSneOlpHa%v1T zx{9A5Eb$mV{BPF(8_nLTe%g>QeymW-gem&65GtY^LKR2QR#-0*gP-I$XCZW8Qz{vp zrdyoi^3ONCWr1E4Aq02EzlRn$*Hg>0yQ(2GZlvYOoXs*^p$sC@-v|???kQFkHe%62 zLsPuVRzi#zCbCW88LXdXu)%`@szNl}j{ zb^TdF5T^pTbPwn4+AZt2?*0!~_JiAiFLNg@fjZ|KP<6vl?A*}%5zww<_?s(t+5f*p zAMnTNQGw%_2vl)Kb}u4FY7flj`a&`$8QcMgk+8Q;mWH73rle42Ty^TF0Qqv^X*l+E zkHp^0bK9y>m@|q9eo#=(O*GYM0xSy*ok3}RdaKo)t^j}rkx@HEqk-RXH~yJ7q3A45 zLJA4GlCFekIJn;8rWK_f-+l)yA)DccCMOpUN-oW?x}C;r9-ozgmd2tlIQP}jN~t+i z0m?_HLm+d@u6%RS4>61CKla_TgW>BQ3v5*_=9nbk_VEgcrt_8Ld>)VN4Z4`_PqSV9 zOnQTf#WVocNV~N%^82*eHl}%fk&^P4W)BFuhx4fzQqbIfQPbp=m92i@DXH%AH((*y zGRAQ0R(>mfX7b12@$z$s8f6Y_%Y$Envbi0R6&suFXRm?&0&Kw+*>>8-)azf!WpXpW zdj;??_y>$OeNVeJvGi#hoQkLZrY90F(7(a-;D})A7ixlmL1HWy^A__?L!o68a;lp7 
zBmCJt)5%~%NHtBb>4(=|0=(o!Kyp8JSaw@IZXNV9@?=SI>1*$P%&$5jMlUu0(J0{z z;att^1eqOkH&b|MhdUSuf)x5oc=m7*7510`oYtm z*Zm}C^9|#oIGB%&rA&Ezf}=7|9PG2ZqF7*j*EAHpnv-my-!k&t_*nJAQ^XSC(E4?3 zowgq-#pK&ijdr`nn8s;soZ2vtC-wr|K25Vt;C3qD#s&Dlr=9G&NW}eOqPQo>`^ux* zY5H^hubuN?Z$Pcx03wm(xIJSI7VV}1_dAY_b2l}c4D5FuvzMXX+Zl(02Kb8xdG1-! zN!{aKhd)+Y-LG=oV@y^i`8AfgX<7W+*dDz$X?E1I5u=|nw#1)2E|MuB8~7|`y6h?w zH!&tU3Hu&FsFH4k+r+JjmN5*oYz$T>=M|w`rszg-mvgiyH(i6cU(_ z@s+`YIBSl-gW%P{PWa6%gSQhwE)RDTK!sA^e-8>nS*bF)edy5*+r8~FxOOE#T4I2z z>~t*z%ocFRCI5d+y?Y?j`yW5Yx^i1`nT*XXw_#?OvCZ!1`|A7q{_*=4v&;Lv=j-`=JRcX^C*Kwi zWFr%nuI$GNyl~)jLs3X>g=lZzIo#`MR~(HpJF0Es0{&FI1~5c&(7b;H@U>yCT1f?n zu2nKP8rH|A5z75tDD0S?B-~8$<_2$ zVyj$6T;q8t|GyFu8Tt_yJ;oGTCW01v2V#e+7zm79ba`T&8wc2G(UgFAH+)G15#jg~ z%C8mrkZcA?=PU@RfAI>!lx`(h;-sMFAO`LuH2c#)t`$|Fv@da-mHqHF-0?MH-#L%9 zBUFv&51jr`HeR#oA{(s#4-~!fA1t0ckH*}ua@-e|wh5EVO=9stRGKayPBEd;t9*_G zM;;2Zd*busx$Zj{=c1MNfQs4_&~IAX_+kn%00pIJaJ+Hid~$C_hVlUpGK(*XDtzvB zH3Iy5+%qLcK4#yfkV}r0FL}(ut!JH_y!N!Q)Y{%`t*B6Y4`&;YkMMfYtUD1{qF zE3B4;WJvN6MwU(%WcztBd;I>fvegn{_bt>|AF#Ty?Z$gml+~&XBo}-5t6-ZIKUvKiNT|ss%e$sf!TXY0=IXK) zKTqMl-zN!b)K(_;qQfcnYKnvmur{NEd+`NXwh5u%2$5ODVEIjL*00F%%X&nmZD1g_ zOOZbcjtG2zb0M8_lQ&u^F`3U8jR;q0DE5pq*3MUareb5E2~(*0@%<~yx&OMgs>i)C z?@OzrYT+s-%Gg#~XwSCDBm~U6>Cul-SH>c(Ek{mozV-5&a*qb6{@q3zyRr1_6zs^4XHO~952Q_ST( zr2K@qGqFM3+mF(YD3)Duj8*8${`!7Dxc7u$iN{0bTZh$XF(0(4bbk8n8F(X9I#Xrc zjM=iO^=~`mUfq44Srdms$xk|NyQYNrw}g~c!W(L2dKwF4DdpudqwrW&!!hSh*)YL) zmqzG+Dt`=pxYBy(RnDwL=h@VL7fNkann!Re1_-iAX5%sb`~&@RgMg{rzljtu%;$w` zt=L6FkytrX|MfzS%agl$gTj5kC3NmHLee)-R&{~hDz%-s+oippGoWc=R=F$n5n73- z!QnP6=34{FdWY=^5=hcW>Re?Mv-iD^2om`Qy`R#Mx7(~&l@&KNA)kB|2k&dkyJUYN zUY+svq+a=vs$0k0kqeyxWS3Pg^Vd7KC$h9yhtL^Ra&<75Sz@bVKEtM`L9`((BG;3h zvAO(h;cTVP!3{q$zt`uZmDfwnO1dvQ?q8>E!WlCMQlh6RkNs|B-g3_4X;!*9-lHQ? 
z4WyUSap-Z^oNL~u?*?Bl(yNK=Z=;wGE?JJLU-=vkXKB>T=WK4wueDlF(@sXhKdNGF z&}oBqVqTD;#(mw5H&a+Tc|y*p6njev?yQwd2sLbX)S3TO9xAnIzSBXeH6qr|n_hRC zH>!q9!^%v0D^(p;wuKye9Kf^Ge^{H_c9*Nx3e(T(GU?$j*J4$|8j_Wyoi| zWia+^K9AfH;p3QnFJa2jBplXDw0mP27*H6_OWg1SpvBqkPMOwDA)h0eh5h<|O4UK6`pAV|0SR9?J_}LVa^mKe4A!jeIzo!j;;8 z`rxEtkm_;E{~V>v1?PratY1eJ6-;l(Ien}-KH}C&fky12)5#R!bYs-C&#{IVzAyo+ z^ejzx01G|WfHti;8JXcARwCGY1=?0!TeSZ6Vak=Ef*~!fa%zI4+L;%!>?&sFX}~3h}gj2T!y2SEdQ(S(ycy_~rONmd1!)mX;zKHdC@6 zP+h+Cdjnx$_T=j_rL0YpK$#9G&I^`(zwkTsA^i_399}FuZ8x~_Th?z=r!2K;`Ifp9 zT&fkB@6HK)~+?@FGSKP3B>%FC?Z8eF!+x0{)<+0WQ#iL2) zM(eAYTZi9Xi>ILjgFw%zC|Qt_KYTzS-aFf}V8-dEDupH;=8v0)u_eT7m2}3Qz zyl4xJJ3(V$`@sS0q^j_3=;LxHaxM$~}T}vgCXDpUL zt>3h-S{z_ruo6D0Eq5cfWgZ-D*RiP5TYzl*>z7R-w})DtnfJ$j{;=BzVZNvNnxY&r z`8K?aBV~T(JC<#kRa~1lw)m;EVm1i!y1(;qz@g0as;th#fWY16x|p0; zwy*hTvi!*3Xp~!vB0zbg93=#^%_$VqIU3c5NQAEB2CQ3G9#N^v8<*B&(#s&idh13NxDB%$MNB6V15w!UL8mtp^q2x5hdZ_1Wq5)-^#lrdz5qi=lx!0q0T!gu=T=Mq>c`V0N}cZw!jkodtc7h|>rr@W zvkJIz>KqQQ-305^W4M3VOlCT!Vn9$A6wEOGmLL%UKtN_a;89}ZUs5GFuiUYY*w@0+ zwKKclS1il0xLB2Q#5xayLn_NBmDilCuwBZUl_d#Z8SUadd-|B?kHxXmODQue;x#}( z)Ld~(GcvGk(fMBCmF88LQnY9oa8FL53Cq_cR3lVEF5e+T-`O?S3?mc}$vzhq**$nBa}x)hfDeWm9LZ6pS2(` z79=ltJ*cW%doPFZI%jK0Q$vCxIw=uY9J8RV*fvx}e4Xq9wy#-598fUi7#{eZn-jgT zbWbePA($cZ*kxMlGn&BDXn)W!!*~lPNW*5P-TZMVDMw6QmEspOX0vTfa1~B#Rr_XD}3V*6cqrTlP z4=ot}bnEF~8kbqKYil$uU#28@{tPZ|qH-4p0t1xeJW6XR^j&$Ky~>ee-z=U+T%f_b z8QL|<-sUpG^*CqO%%|rhSx*~nDESG~TE37u)3Q37rl4YX^}9#gz4OYQ0|Sa@otsL9 z;iJfB2`xAcPIZk7Gs-o7{{@e|8z=lNwbC5!Ihmvgmy(d;v%Xm?U7=bBF9nHkV}JjB zGcP#BH!t*))m#&~P1@Xq^HTs}OmA)KVS}y$ROP94yYi%nJDXVin+0c%woJM1p_0?i5WS7$pH=ZdG7Cx1d z8ReF*4C8!9`loZC!@^*ry73nyLf|zT5>UB9emetYrDN$VGhsO zX62}h#Yz9Bf4gywvg6gB`Y5eb8lbd~P=DAs8@#Da-nqUHOs7(+T*|mw{qgMaKlahs zz<(_@N`gIjQjAE!W+asCKn^s$H$)_CF1=t9pjzX&4eD#yEgM*~Vqp*`;YuxK zyO@2ngC6Ik)sv-87R6ls*zLS)8#7$k>=J0!(6ERxvVSqy7H!p%F?#;GVAy?iw3`#^ zX`r}It)nj3QWAt@J*Mf}s7`_*z6;Tffkd%ld}V))d!91Y*#&qfi)Pw>c8HZru{60_ zRm>HZu|~w&^~hU;G3Vc(Qw1W+F-2Op59LZn1Z_4CEyev35lvueA7SxbJ&I6MZC_pU 
z&T~rlZ~flTFdZ~bUCOg@hdP-SJ7og6)f4hoeknBH&re_Uw&?A3o&O8OZ?=a;Nr82- zq^k}0D^|yE36VQ(bEKI@lNw8Jqb0ubb?#bL@eLe?C;<{~c7NmY+MgDvRqCoXjs}lh zpQF5f0D-KIoFE;$9h~yRjw7tq`+m|0%8qX&BcB#F7AGF{W_8-D$0-}!)qO_HkjY*@ zW&Lq@kOrn=uR7cB2~0{A+L}fU+IUCwudCZacE%|M1K|U}2(0f0x;kr}pt1U(|eP%uiMu` zbe*dAypW&L-OI|7rk&E!Jyc6@wSR}m`cc2mIUH6@)R6DtF41Oinu;r1K98Bie#}pw zGFs$4#WM`kQu@XDhNVb&O*_L1c(8CT)7x(-t+Ckgi>U`o%(QpC;bH8zR-)ptKVHUsrd7l5LsW1my1HQC&gIYjGX;*93Il&Vzk`_h zI$bhM2r17zQJZv;$Pz1UeNjGBHeHFArz5%@j(j@wtYP2M0wlj4SNt4*a$u z-Tsmij?)wx+R7(}TaT~S2t?FFzSWpS_DH$hF;rui(QiEAIff?UXnD939R_p1&%KZi z(R|FB54_Fdgoih*QBbsXSXXR>z0p)o-SZQuy8KK<05_t9Z|%n+BgOhp#)7u_B))X| zV}tCw)e`Vi8{76P+5?vyx}`DzXFk7}p~S5Hm<48-ebTs6{NyXmK%Z;J$NaB|xXbr# z>y*A`D=v7LJ0j6+OX(ZMfF5^4zT8sOgQc`ev@cvcccsK(IDjmNaJ|0#r()3P&o5rM z+8zpVsKuf{V5aJB>m zs0ulA9s9{bA`4aIy&|~(fV8Dw(NlfCpS>cw^}?s(1Kh(&lU&oW*A2rz*?mZkkWP@? zhVXcyEA1|hxQbbDhi^RVEi*-+Dx-B2!F0}qehEf@5;6=GJr(C4&WW&?GES`uz}PF{ zp-8d45RUi@oaLutL4Tp5inYIIY391vYSbz_H(2g^7jD+j)_M3X5f@%WeKr~V)Z5Cy z8+{FIUye@9#C!`+SvC2jyiuD>vsf|#z;=yfFB}XF3q80$puZp{6NpgQB@x_bIw7rb z|H{|Fr)z?5#N7dZ`@A)cnb(e#QRy3#6sF=!U|9NFfp6S&%q=1HEg#omXm{AEh7r1R z3WHc;Df35c9ke43&D=L(P!JvBo=?}e>;LZS>aqOMY3T0T`gU{1$an943@>z;P-@aD zU$ey@I}*Kh94GH5$05=@AuUqg)1U(teU+)mYmK#>An!Xt)-3uX0j)+~UAk!{T)8Dv zb|uC;z2~(Uhn{k@2Iq?`prcg=aq5knA>LIW3vu@-J8adkG}QJ+>nsqAqud5!YT<)b zWH6~zR+7~?K5n{usiH3%P~J;4ecj<_qr+|s@d3pfCl$+<0v-(+L|2!Dt7G&IWPw;V zqpM|+_YUBiZt#t{4gh$w7~HeZCUWG2U&_6X@R9rY>c{p^=Xq+yHydw{Z})bW zl4T~K|Lr#94OY|UJ@5}Bb{(10tjf>u{vCaIo8Hk0aLf3$;|BDD>ZzkpozT&F zL>Nq@J6ehiI!$L1s#sx2-7?*piw_>Ca%kAi(dpao#W*rcgvP%*m|m8Y=l!me@QgXS zDr$c)ax*GUK4$=>#HR0n+X!@C(e7m`Pv5Mycr+!ie*$p-yT&|8<+&w`=y{<$Gp$s! 
zE9-iT#8Z>eS{2vWT>jc|92wnQgIUKq^MnS;X9tWo3ikn3|G=&ZeeI{@zr|+Z?*i6W zRo7fk(Q#Isr6s(wDp;z~a{jyLREFEYjrh@bd&Dz0V^ePW#QtN=9EYl6p?5TI<(cPP zxS0NvPWYrIC7p^7~64=flSDn@XyiDNk2ybB4ynPs&>bIpefF&56keyUMc`k!Jz5skIfY1_r4<56x)jYnQj?59+XwI`xMOOMC# z=cAeVp5lX--bFUMEQ9sp1p#}NdWrCEXxLBW<#99e#d>cvR#l(F%Rs%3I3@6*tuCJ| zCZgeBuQt`~-aZ?FlBz}ACTQ$dRs|Gu18~pebuamWT6TM0X<6`!Z>ZbU2rkKAU2N;KHrAqrkA>#C+84MsSET^q_UB z1QNkSKx02zBAh@?To3wria-9fN)tte$!oZo3(bFN7woP?QU*;&UAYy~;wmM!_kHu8lON(XGR1=xY)hcbWB6{r+p-$?o<=Ao^wcU5v zS>q>s3#<5I`_u{7hRUox;-C_g8YqHCZg}n5HahGnffk9akfy!0%njSjHMvLIZX8FO6M#Rp&V8MnY=}z7YrO-tMUKyH)^^Zt6jdP?ockfIExKsdjJ= zuQg}mmM_W{dIe2!T9wX}vUAo-F1PmJdy+vyCp1xE&8Pvxi(GdqAW~2ESG9Bx@g7rK z+%J}w#)C7C|Ei<4LoOI$7#gS1QF#eYn2XoY|w+!OK2F~}#4RKrn3znRIzR98h;t-Ck6|MYZ zp!%N|bCHk0%Onnf$>N7Zz2B3w=GOVoPpW)X5}U@xx0Lg$N48NeuqH}R;GI}7WhjT< z_O%i?XUhV}0Xl`60>0)cQpoy3J;f1O5(9rfzaqwVTsn4KVqRM(((TiU)>@aR#$4gM zx9>!8vzKnxr65=F=nO?C$29#!hZBOt z%-}2yJtA$rU#LYn*?#-lf(&<8kUU>#Affc+HEx)i(>T-{4CC!gidYy*e#3_!A4)+o z0!K0RcgTGqE^Fu6r{~tdRvtlL?LB%fZNGynmWxo}x@wiN)dod1{V-Xu1>7Wc;zEeF z6Q)ipjNDL?*!rzPf)M+bOBn~>YN#1u%sA}1$ikk)ER;zP!UICmmteJ=vvX%p%>aT0 zv(yd)-xNmscP--+0QM>Rzzf6Lc{|AYk%sbkqwjS0%AE|(&h6&w^lFUy#GY*P=0ErQ z6S~Uk%Ab?YC;Pq%mA%0JD_BRMy3z{NS5fUlA3F5Pa&509zzZ&8y8mbD`&Qm%! 
zpT2C$znX6iDzIH~q2j#*F0|fRDtQD*%c-m3pO zT4?AR!ikjDZg;i#Qu~c}&VW-dCjPFZQ!4G-do(5|+5&fQr}vaZ=KGI?5cbxVzcTF} z_Z#UAd0Yo?viDd^H{h#%5>;@_H9Fz8f$=eCPO%XjpmE00k^^8ZdZfpGX=j*EvUOL1Nbt4YTC zlciuSI;2Z3MbnA*UMgfcs7vd{Z+FYpb|&DfTR!oLsX1?$E?HrhusGrz;tLODEKa)i z^Zv7x+3)rv7?Y0Rv2ae>xKLo<@zZQXwgM@S=HQ4D;BJ62hEe>Vy=AX6J(RHMC}u$> zRgslF{-4U}>4=P-#veeHl;9NDO%w>{woA?muq>_Nafu$Xn@XSldY;5riEHvSjrzS` zU}K4!t@C&q2}hP$XmwP_N2SPkseYUUSPKE0k}z_ z3A%FIJYf-qA!XZs|3it&4AH659GKO)pV67UCPyH;xgo3e=lPI}G;OrCYj7YvZod&svLS#Deury{&*ow<76ja{9&_C0%+uc zl)g4QJ}Rrt&CX20gDip7#pd%`&wx_ojmXOk>s>3(@Q_C=2Iq)|;bU`wC1UaiOOHVM-m$F5N!% zwylqCTv%f!$iQR4EwY=*4@Vu~4T`0_g9J67-=q-px)ceYt zwrFuQ4|sbJxku~_5$8pOJB>N0wl%9NhYtO0w*)=aq46{2_Z9kQCykZX4j431uI+qX zN6ob5#;VcpUo5_s@4e8qE3U1t-TU}|Dr=C$R-faw*N>Ss7yBw{!s~jp!P@n9t=4wj z_7n15Z2}7GP(jkywF}86E$(VsbdAjEEQ5uiLgiw4M58{V?1lRszB66%ZVfqtIxjm$ z*%mrqm(ZrOo!&`gt_gr2-r<_$o__f+DV)6`sY!DX<%52t=!L3-V%p&UHla5}*z-#o-?&42E$aJ^Z{ zGz;a{m`1?PwGokxNR=g5qyy3mI-5=XWb7l;J(u^Z0WOzh#A`$uO+#^@M zu7n1F6>Bz>IUP(@bBA}A)!o}6Q~>kDwFTNRMC8^e9-PiSTl!%cQ2|zC{r5_v!xpWB zbChY|=5B{>gZ=LqPj96pRrChLL0|q)#gSI406C2!69vI^A`Z1(5N&mL2_VL1$*3jI z%kv{_V=yIRsXnKu9*|Q<{qPy&j$~w)$5?z+9_Ly#NjSUTvY#u+nQegqC-Pd(r`sR=oZ+Zl{p>Iguk*=6`3!r z1%+?B3Z!8`Y$_zbH}yO4bbFUDK}(?` zVAos-x9q|CD_fQ3bQpRnHLAa5ZAnAZ_$hB`89@}YB`%z_XN5hOTcN(Y5zR7$R$3j8 z`-PY1Gn!(u2)|H$oun`Sr0}O^w7qE5(@Wd`>-$3co$)Qz0KE+RhEhVm@G<8WcvOp= z;!gB2^J?$k9)`_qOSgW;Ov#XFuG+jX4l!WudbGPw#LbgNvEE6wnBO#a#3`PU6ls6L z`f>ed5#4`dBBX=K$&RL`6=x5u=~)q171q&fJk)G+K!W$oQTyn0P zv8Up?z$*NIm%c7s`|Mpd;o22>xz$@%nI7aMZ7%jSt&5u^?o@lR-W7|#adw|=?QdEb z$c3Cm*q?V{l$yW(xU$)4+HPX~sS2ODA{Pgg1IF9-QLl)s)itL+)9bGp33fx0QS1t5IW9X_j8+!Gx6HBgOs{Lc= zuJyO!5%H_zLxDPn-_QzhK|JwM+n1APL-9{g70o3(3Z^+duAZZ+kIb9S`pV~`$@97x+4&{M)JvSX(zdXIBe<)8Um=0yC_tI>7 zy&S^#h3i0ctL?1zZd?OqvjKsscP>=0EPs0#xkP9!#Zuh(N?oX z7UPL^$@LY#p;WFlUJX*xlQJ{X)cwnb{$lLkerov04QxeH8_oia)V|$#I+YzXbZcNg z&_j&&I(2a^OBf?}D~%PT2~Z3t%x+t$*||^-VYO{oBctZE#BiL0^mSQ{7#!vSVFpthJg}O9YoSgCPV(kT2N=N`H zs-miQ_iQgAw 
zS1zQMGPBw1_~M4B)I6imnXPix6CQf}!R~a!O(RgeS(LT;Z?$ zRIkSy_L=16z!FJ)H|uo@tV>I=uM(OA1jc78cXp`e$_RCncqcSX(rZP2hVbdxJwH*Q z@reuDa9oTgjeQUO+*|Y^m`Wxg7)@*{Vt3<#mF!lHaRWZ1#P{_b=Zh-(gh|ZD`D(=U z^|zccfEoK^a~;-gf{81m9eY_~e7n=h&$q6YyDkikAMJ78r54xP=%l|(KgsWy(LPjI zWmR26VUcqDeu)31+-fopwqWe@cq-;SpcHoAdf`4U8unUmDk&e0sBrlvH!eVNrHp*v z!g= zdqz6stH!2N>>^9?9;=wK(?F_}fX-V%Gs&=Rtlw}q#Hs>c(&vHWFn4O@eet+n7>Hpy z;;tV1e8~vGum$ia3|GiLzY4MnHT;PnNnl}Nf5y=Q@^r% z_5a!Igt5k=$R$1ztZM<=w_e(*n58s)v8vxl6!UnmcLn$EaKK%!8{HNp$1 zTf)~|x&S)m^-AZVH@+uZjwW(GgF+&tF@CEkugF?H)d@rQI(&j&q1W7XafJ1o`ClPz zTFMP`Zs1l^bi071XMtuF1ps{~Jkz%FT+PjQohD8=oebtEtS5b7EDF+mwQ;!nqfm|s zo*;44<|^N0MHfGv%LdaE;*{hn>efLI_WuOy9KZ#NZ*JZ4=U&T!w7~%X2||u>XUun6 zFMlYh?s?Ja;piyKoiMUZ9&40b4PV2b>;G7_TCkKJ1p+%jF*W+VCSg5Rz?S$(lj{DT zA^E!!_r=pb1!@V|7S(bo&hy-}_Lo2P1}l(O;Kb|;BY#7R^=cL~qGj$%y3U@JM~45Y z{4$ZSya+?G42TjzpJ_^FSd!hAGBv1UESRfaFxtc|ANj``4=u&=OPeEFZ^{TrJt zR#2(1(}0PQqs$9be*f2#STlM@we;m^(FV9QR15NmZ*SK9aeRC4?>DUo8b;F>n8(^? z>o%Kc#1o9wo^+RAngs^W8o(~~nBE@a+?`+RN?w0^gW)B_sO6zM&@P5u$BRcVN*N#o zgS>vhSP|#0e2wQlXuY_CdL%HkX&F)J7eIH21DukP^pamE-CVExNiTkynf!h0@yNX$ zP7dTu zL-}42vJR{{5qif$=QoDXra-hT8Ljy(@4f?jr1p#Ho&4wf&s*M@RPb`;Lgky)0Qs{{ z8mGS-*YXtYY%WW1rA^+5y79IfxEt5O8J<<;1DUdP)VWGM(&v|M36;Bu@A0rmx=L@LN6%S~9{S_E=OEV;b)qw`^6W>sQe|%Re z)C?n^U?Q8HY^2ODN)sU9n#3|Hx`IQe5F26+uV5P2VPhWd{$Bb6P5`61^uN$jBDsv* zS!c@m{D}*^C0I$MU~je^>{~1^Z(SQfHIq@cDfY)~oO)lmFa<+=o~!Z>0BhKjJtpsY z?uuJeq^B*n(jG8)kSsex!(VMj)dcrf*pRrHj;ZqNb0*5i<*voJ1!Lp_drXrp-(v#` z0CRFM(9l*tBDggW&Xk|WRm5l);_@@{HYmy(f1-MXST1?kEZWGZVnboZ&s^K@SFxcyD2spbaKLXItIQud#U z0B51lV{nPB{H2KvRvUh%co8>>F&VckI$<`Gv_;+rZ)MdJyxJ|Z=`u#^lzX-QmMbsq z%maFtxpqyZkd>g;7wl^fVw@b~zLvjOG}4zDgK1>$huipZ%2R_1188ZtGM1^*OBj)Oy^5Gtme^l9m1;w(psq; z3JTV?FqfC!mOgQ1f;$T5i>==beidVdtuTZ1TahB9 zk?Pm-x=em}`|SYJ?sC_FAFE&n`b5iAb1^$EsQW7)&U+f;=@*Vjl-o~l4_boqg~1=V zZm+&8rr1olsF=u7a4ae zeP+5~y2@H#ig$;NFQHi|HT56txLoqEl^#-b%*FJ+_}bSTy1O@PzoF^J>a6M&2SjEpbCDSVr+1X``jxVNdC@0HQ zKg0etM0QQkh2U|Du^B>Z1R@qD_X=b#T2+{pG#v`TWw 
z9W?v=7db|X_ZlB6wLP2P)+e!|aufhk{PfIyd>20}Zh91F%?Q8}faad8UjFXRP&O?Y zY`1{7j+?yy!--+%!&4@1mU-|%Lt*ZloTXx1D(&Oc_;C=)buJ?42gB8*Gt_aj$mu^7 z_5^>y8i(4&$EM2a@~Hq1V=9Sm($|hBMXqZ^FGN;^^k>9BWJ|v9lY70{;o%^gY)Xo! z7Gms9y*lbzf3)O|+`%9;@Xafkp~a9a33ev2$ms+7s$9-bW#ks<=Nd~Fm8dSf6Nm+0 z#vZu-hIrtsRxp#i7Y>e4%MU?m8q#(~UAUkBJ;b-=sBkSQGkqSb#V|YdZ1==uRCJoP zQe8o69Xs(j4H-|J+xtdQd4=Jwm*Jo%GwtULLer-kBUniCm z2!Y!%QC0f4^jcRC)cq?W+!zLMVhm;LYBYZH0;8hU_i$!6RrzX(VjC+byxJZe^zc8G zo5&IU>A3FWVvQ+eit$yV#ZpZ&F=hvxPhFEThbVx@k_rY&GtM!d?e7u1(<_0m+KVi# z?g$a{v`7W}^Pkc%{Z(`-isJPXunmSTNOWe!c#4h^=ECjr? z;l=zNx_tMRQgtHph%e2o7=!q@QZz+bA@34;=rRLtl&lk!ek@-d^qwmUe9HS3Q($-p zta^l~^Gf@*&4exDDL!!L;ftd`K_PO~bn2h$N;rrdI6r6%$JWYm9y1u9e{rAp;sUj? zy}i74uRSXhtzX_+-hl;7AMguL!%g3`OM#t@PFY2K-x!M;oOp6~F-C{+KzvzEThrC@ zk!irCJI6jR9FNe|RyN*fz?^m+G;^>3^Y?iEgUza=%eD*^(Yz{le0)%H&sR`BeFVI% z6dS#%luZV}aR-=T5~fRhV_XJZM4r6SfGA?J(XJMs;#Hp`$HH{#gEYVe;^W*YND=D2 z;*t@dB1aTR7~=DQ(xSc6K@9ba=KHchPm}n!%vkFDbn?|M*|uQLhXBM%x{r=>FI+Mh5i^Qj@C`NOz3{{Hdw#a1OmjkMh^8pH*{}(Gamj3Jk&l z#zuBo6b9+RKT5viX!Lhnn$vfL?iBeA3d+Z#lRlwIiI(r7OXr3>$ZBz4%U)cqKb#3z zTL3t+Ea8?=`RA|Zo21SsQ949LjE~&f zMZfg>sgRj7(BFq~&@Pd^_m z2=>+5<^xZ&;!49$(dMCTmE|w2Ur-j06-}7p zt_P^yu596w7+&1-l8X7Ym@Y_s>orkov~$k++p>mk6uJAa4j`&a zII5F_>*hQ8v7r!>PVo**|>d-qW*%hWYfk<#F z7!pun=yf=$#mf8iZkK;X6-2yPs82eT7Ijkz8|*?pxoyP{jbyhDrJ=#1nVk=1P}Eqo z0(*Kws!4Zz0_6Uj-zT}nqqgE3l&X_Wpb62?NN$-0Zq~-?NfSPjUD|?J66_j~h$nk( z?P>7#TIZC$DhV&7&3!pkvq5oZ4NZ8d0_F5cL-LKxJP}Ix1m5Fs-&h3>!5GijG58tb z6dkQF8TvVDGNdFU;25dKNzPuCV&f?<-cKLtFTVVoLvn!E$PI%$@j`>0i6pywPdG;h zqSg)v^~6*i9TC^Gb&uX2D2+$wZ2+#6+F-|x`GqA27Wpu46TZ6M3!2Ni01?iFETB1{ zhH1*hnpgDfpQ~v3sOIfZeH%=5qN)o~i=)Mk(8OQ2{Px@p91n)dybx}h377M!9m|@8 z?72?{gy^G^tPRKV-T${D_At_4C>ox2M=IW~w-m!yIOg<4>xf?Bm3ZCHL1?EKc`vkwlVtYgC6nc;{zgwx&Ix8tbV5^LM z>{pixCZuC*q5Ihj4XfJ0{$d0D zaD6qtmt_gsVcR+(6+Kg(W6E$#Gk(;qZ(DaL1^+iX@4hvkBTVmH+m>x0RopemKd)XyOUo0ld64f73G%%ei+XH_#dHQ$el;`8RCh1_-= z6k0ve_9e!b-!L62_$Z;{?%{5^mDDIU2x9nD3Bpa;R}y39Q!4gp&rZimEo0!3B3B4v 
zZJU0E@K8z!_Y+rcjP1wnt>|mkjp&aIy%RVTQE+k9&|Z!Wevkgbp<@42$;7(1Ygy$? zmdma3e6POJygsh6XSL7|=3X=C1(=6N5TfU0{XsV~0*j$^=$X1nv;@D;N7uo*`(t6I zx8YNq^Le$G01Qd0kapwdHMSDF9BH|4=uCUN3@=eKU=;{ znnh&VN`UI8{4@Dz#fuNVJ^!g(`692I3F|nC`+_*Dq}5*?scD03|BScZpBju5^xEGe zcK<|Wp)A^Ji@YCPn7-z3cFyXy(UOgSS$}|6s{0B(Sa#G>)2yuS4QSU~a&QYqzbo?_ z;o*LJV)I+)GE!}jw3!{)rr>!K4(;qv#GzH1tW~sFY4Aw6{5`(VyOSHSay#E18tVXjqmKHXl@fH(l%$8dS0<90Z_|?#Z(sDS}Zh zhY3eOFAS>IZTXS=34Vd-V)j=V&3x&`)MF`lrrh9VEfFSCqMWRKwXpUjq25zV7W3e2 zdo~eET0(Tq-9mXR|70(rF%d)-O}#DPS4nHvs(cw^4-VQgmV8qPlMo(6W!i!!b6-a^ z)8gpz7wQV2d6VCFu{51CtCu7KIi%_L1%yRY^5hBtCeOdf#=i*Mf7(-@W&M-N2L-6R_Yn{r*x& z(HTnMbxwRmV3R2YTi?{oS}4r1|H31`ZCH`7~Jk4m1kFT)}5=wQx7 z8oa*C(a5zx!8@`LoRSg^wv1SV2^f53*||*SQJz>JSU3y5r;Rf%#_!MglExf&Q#jBB z@6j~CRv*n#7@Muo5>H~Goe>#NnIb0$3<&Wsex~EQJj0_XV%1J$>VcQj#olG)QQ2m? z-$D+Dc}=0)5qf?@08L&lpd!2E&$vlzhR%W3UlBGJ3BZ&V_r@bdheDnbGm9cv*JICr z6KZ>L*Uh6G>+0&X0iI(?MRD4(+VY#?@5SPYOHDxKcO$@Yqvw8%g+1NLk|R&UC=B{0 z5s(FbmXBLJ@&#hCTLXL*O{A4XbAw&v!LoAHE(h~m|D!$C7+d>T>fAV^k`ukv5@TC6 zJg;)N zatXnxk#V5dZIwh7qK>z)OwMuICrsMN?+Zi8gZE4PD+9Qe*%%t7YsCtyQv(kYvFLIK ze!#SC1sXO&N}enS_hhg%1?!2wXGJ!q@Nbu#Si2_=dhf_%lE{C;xwMS)168WeC0-ZI zCvj9JlXRD$B*+=h>vwyhq|W6&s)|MdLRlKYc_#i(#lidJv23H%qZbQf^JkxQ9e)-0 z%^Qtm^l+Iq16R7suf1CZ3#U5UJBq^4>s4xSU8SB2vk$b$tx8+?eN_rGJ#zg6^r5Kv zZq)M$ARfTaM-5Oax4)~Z0G%s_jVttra`pjSFB_T=h!~bBVgHAycaLYff8)pN40G}odVD=hJEc+dr z`FCFekg{D?C3@$CFUxwxLb)IFH6CSvF_B$pQCxIo0(%zTIDa~0C1BlYOT*_dFL{4{ ziN4L+H!u-M3n7@(S=HK$yk@I#tF{eI#wt7lq$x|J$8tuQdkeha1O!RA1pyyR^9cHH z#i|c{^nQ~#vSa~FngB(GeTs<8StM(hb$p~)iR!Rsg^sVqjDmUsm?{#4C@Z6~Y^*_j zZfOI7$Ko@U!e#eDyN$Eg6F7&A2TVwGIb3{mbcYPTu2%PQb{_mR0=ySH*$EKYnUBsZ zP+Ew9aONVIi@|4fP(oId?M9Th{NRdh6wqb)Ry#qii5;9jq7+2(#ncB~HHnYW9QjS> zX0g9hwv7STijf$TNmmph?NAJg9<76q_8Uhe#{Y}HdFaPZKM-+-T{7S#VAh3Lqs+)@ z$DwMpw~r_HeYn0Jwbku`lReo!L$IWF zQB6X|DrNxlV+~;wAGO}h2(x^n29yg>yTzp%qBDIZ(BuO;7lvAB9L2@A3C88MjAw!L z3!JL^EWp-oIm+n)_h@sRX9T(vYx5;j+~(lCgxwW53`M 
zOKg9%7>nevZagktgb9f)m&VrL@5vd^VDqK{NUV?LVKyWj894r7pD5%*5=xoKKcN9cEmOc2j%N%@EN>!Y^-zKs(GB|s~d1JkDCewh3 z!BEVe#TATR=5_OdL#VS5ev}vij3cRlU>L_Mz-^*mdW@K1p@%9Fcozx()*zMn z1MZlJ*iy2|=e(x8kVv7K%OQEPiTC-YoFkAUSIHxax-Tm56LvZ~VTh@OwOvX&nR4ZJ zbN|+jM7o0AKLYdzpT={Ho&x`1NQyeMX8472h9z{aKSD!TIr{uq(Nl{(k2+YJX8 z6q@tonj(6EpEo9Ag5oTqI}&-r<-FQVOuY$=Cr+!(-)!3V?}XSIkB1mw<5&Lghx&)2 zn)K#^pc}pwG|AHLYpIW&p1}2f0xln&$iW}_{tfZ-eI7Abj!$dqJsvkdx%iU7f#a-y zdYDf+tIHyn-VObUGLbNr<>NKMm!zOOD7_~>_U%wU7~e4c3?~6HJ8*Mk@MX&UA+Z7@ zlPyVj$}v_qVNYX*!mH1Cw@&i-6pdA;RvnJvqg6sHuZk}f4yZMQfpc~+3h@&WB334J zeaplPXHb^P#8gvru1Vu(qTrgq!uw z5KvP^E+A=WA{-GfxD*7Fs@&kfO-V+Y$^6ERm0A#YOQBvZpB*N9p)ljeoFfczX!k4$ z+~T0iroVnXq!ydHd2uA+^qrOFGIw~bjzTTAT6#rRd9tnB$kxN!kh|V-BsOQ1m4~tu z-Wucn1C=G%^<7Qh@cYD=ink)dxW8PzWx2pT1Ym|&+fGskuGgVi_NohPS1-&OhE;1q z8Y%8yU^z=(KWn0Mt}*D5@AX>FyC)QQDXCi7;%n+$;v0dHZrw5R6!GtH0d<56_f`U# zM7RWQR0j6eo=14X{pE`VDB&@%k0eqtpWM|sDqJ^_6fhk_|kl3RgS@N^EPK$fvXrhG>I zetwz9l#?V>y}QT0#aRI00SvV>b2-bkeg1Y_tD`k7Ty=RJWpt>6lDl*@j?{MRYFOyUYceRC&t}9g28_G}X5c z$OiM=6m&${XL7mQn36X=A`;uk-9r&6y8yqbunZJ-=P4WK%BaU%EW^m~TTq}P9rzpI zr!yBj`IoX6Xq+B*J3%!P6pwjmn$0Rh@q`OLDUa5<0Q;-?WB}@G zdyUHPvf@yrADhXVu_BnI94{~)B|xN<%A_V{dYmeI!n@CyQ%NM@R5rjxOH5rUOu6@i z_W0VBC(Fn@lx8XTs^v2fzCprRZVOoA>48ge2HO?z?{^28L;?bEvuVK0Y3+8X5k#&@ zOu9@SrsE$VxSKq^&KIIvbbTy=%8rb-#5;$$&n0NH@F!Ft`HrPs6D>uKO!N&} zU-kSjYCI^SqbHvfA9$P17EzlMLWOHG93KC-;>zC#0h`N@w#Zsj8w{OS>DC(+KH(Nd zoC$SM5&s95gO#jD5lZTP^3h{zRZ8hpyvMuhyU<1z!~iv7`&6*={~ww+@ICqIp3dWm zIu`KXev4wdUhQ@mk&AUJWSA;uD&x)LV{TrmVl%*k;-g_jW!ju|p)uJYDg{w?TDl7l+mSsK>w^k}mB+Gf%kh#kuDi~Z6neQbc>v`}A%IEcj-QNmo|dyT#%{y= z*(HKk5xBTLTd(dRy)S}eNmXiDNk0x;H9bFmUK3nBRO;qv5ckHDyNh4wHPq*;oRFmN zmXTsUf4J(<+!i|%yN!}Du!kr~SL7@m{U8lZk-;UC5)gp`Vn8+1>EnX_pzpn{U#b@P zv9Gep3h=PsBCH~GlFMS|J_xMZSlP8UyxU?Q!e> zfIZt#@!{*?dgtiyIOyOG{FxtY*t%myRz`|C>jNV=BvH6>6l*021|?Q5_+n;A;NoiY%>>xYZH2G*@JO(r>WPmo2l$ zjqh0-3nmD|*k^Tl3PyY}tY17@c}Q zg+ZI}k+!QwpN;vf+{aWJ`Mp{3%W$^z)HAJM;^Kqu#Kl`Hbkvs(lL$$=r`vJ^ 
zW_)Y1c7MFSy-Vo4?Ol=fAwZ-5JkD(9Tz}%}JD{bV){fCi0(JrK1vP&*mZzmtlbXi~ zkfG2)>vo4ha(l0{`Dd@sWwDoXJaXN(@c9l}p=IV?S1RlJM=s9LWcv-3n~gkG1#_Xh*n6%1jKM}TMVFKDrc9Et&b>;Zd9m%IIET|6DH(|MnyKR?L-Lk@ z&1FAdExP5mq-H?S87J)kh~+ssGfY-|UU=THuD?M?d$WtNkbvlItXQti=s>b|sE?wY zP?ssV>%0Gmk8DSEjEVVK>=LB?IuxcWk||Qz*jN;$I`-mGEdZu4`qI+tYKZehm^}-? zTx?cs4_^Y@krw&PAO6P#>Kr97ln>UiBA|L-y=>4kuF4}aC>EC}@y{d>tLvb}&;NLx zVX*vvlE45mZW{?#!@rHGg7%^ z5KaNkNL_x z+7A5;_mmeYkG#)^%fdzABto8qW2%=(UATk2vQf_~_T|hduN7Q_>#g;VBi49S9a#Xz zre&Pg=i=>#Bxmzko1$f&Dj)3qobRzKVp3_93BGTRTb?c_&W5iZDzijpU$tD1%FUHn zOvvTnw{yeq!AE8Hi;K)_pb#;;Eom-Azvj_Yqd=sF@Vb|3H& zLJLodN<$p|u7WPfoJ$XBNx$xa*8SvWbdci>JazAaFV}+zra|wM3UrYwTb1+Qic7#U z5U`Y}B~}G*|Bo-@P<6Ia0!Fb6LcOr}LtPxi00@l>cbcw;JvRc36eWc6dz5*8FQ5g{ zKmHwj2D5VT_vp=^`~ba3JbgQDq5klz&bMgW^~I*=<4}k|>%d>QG=MIau08$rM}6k% zmnlo2ZqTU)dJ26US)kY0Bn#!#B<%Sb?2dM1EvZjSf#(p)mP~DqKw|BTng^o1V(Czk zZ)pRswn0)k7gHBBsJegqhWD9>r@q(5gK+B*3gdw?pQDu1oQ32VlJ2ch?`HqHS&MEm zMZdwOoT2?%cz&&#F#;VQ3F9B&=AXW?rFtz}=s}-s6uA;9n46JXlZ~LN)1d1R8{U=x zsw7|K-+D!)>7QfdUUDS+{cH(tFTPej!<0>C=47TmQj?fGx+b3tr`vHkPNhvO|yi^k>astNoTKsV4tM zJo#KNfg*21t_MaspQ3J{B4HdCm~|TAoN47;=ru}TeUemD>SoLi{0t=5Vn%`-AIlIk z5U0SRlaNrrnn>sg(yT@3iyjma~BC!n5a8lxkla;oY$_wNKG2bB;VGHN8Bb_z36vLxO3lWITW z^bbVVj?%ZyzZ(qkCY1K#D)A%zMOI7Yfgfe5PXe~3{rd(mcM}1v3M}-Z@sN7qqBl$+eMgyzQmQAg3kQ3nU((|C z4hU~H4qsjnp-y6IWwQ;0GsH(fIuxa->}4sB$wy@ZJ5kS<-sd#bF@!;cDx6giU)^G} zLe|py_%b3(O00nBOkiD~5+R;}RMDLoCAu2hOAnT-y4P8?I)uI~T?&??I|F)soc#{u zAG&$$vBm@A@XNIR>opif4$%B){w~uIOjas7>RsRb{=oFl+W%IZ?qH0VX^NIvqg6Mv zPKWXw=u{2^pfEOyfa&c=9_A0*z6!c|MbENDwVhRMNR@EO1<}sf$y26tEJMDsPHh_V zS?Ski2536+?uKOjtcYgcN#@$u)t50Fqb9@VZlZdLtCc6gVUE4|NC%wXq(dYN~10!i*&#-fysjSZy)v+BQeZs2~2yU$Jdwccg!Vt%3%aA zao|M_EzRIZ=SjpYOsD~FQ#AGBSo--2RhR9YmXHL|#ih9#KKENMsd^uUKmIIV2h@|7Jh*{nIzd3kTz?32yt6F0bPjG zp4~Efm{~ffd@^a`h-&GB$UUHuKhm5p@4e0iq>$Iig;jt-g`a6W#;8x}A z6eZ;0&Q<|9u{sgPm@u~4Sn#OMmjzhbJyQon*7{dSXB1yf1ev+4sI}|W&uzceweLWv zQ>gr*WR9W7@02t>pB6g#C+q$%mMdgn!fnOWNP`!*3fxLHNLo7gUY4C~#{0p(e)sBP 
zqhHqh8C6K$^9w+b+k%#PqFze#k9P%0AGt;xLQ&3@ceFVS=;^?vNE-&RZqBWuwT!J; z{RDP>yA{Z_+O{MQc)!Kjsyl~A6^#j*v@z7HT409r9ikUJO`%LPsn8D_kRjten!kwR zVko|XcMr^F-_b4eATJD*y`&_LcptLhU0X$Xn@;iOuY7e9dOmfNS z{`I5>W_=_e>~UdI!EN4P18t|**CIZ^A0!J(1yIqHz3baebZAH6K5^5R7-~Aa=Tqlz zx_PAAtVzhJ?O_{LLJb7NBksET{uxDQDly+4Pxf*u?aM)A8xZ0%sFjg-X0R7e_~#sm zAGwZ)Oz0dRJ=#5Gk;P*VptA*WQhZ`h&9m;mFyG=lQkHL^5KfV@0dvdP!b?Tl#m~y! zV`yRIm{NM>Qz(#0Ng{2}2A~SnUo(TrKZ6e{nH0==Fw;gF2c27(i7zZ56fyzu@WFwbpu$6W0*`LZ#8(V0au?^!{m zHEM!y-3@EX;(JG2g`*8cG7=B5xULtY&^%4l`LyOa|9cHG@Ukk26;-pZzw~A7`>|+* zQ+))hI;_7vHe-FDb8kTH9n6vaw|guzVkd6SMm+2>>f{pjtg-Y-k2Mxsf|5Fp&6r;* zgIpVpwk9{v0kY4zx!Dn5MnQt(zB={s2B!4pUy`00!ooi~|n{0|m>&7;qjW&-O^PTY{ zVXPsaID`D^cPh4=JUPdwQ>Zla661HaT2LVjsi4ag-SX9(aUG{KPzQ|3Fmf6`q#wJG zOYlatq|3LfOpgm=RyqTAKmOv#$l5@O&*4d`Bp(@^;dMFpZBO)BgGz85t_x)d0i8yv z3?hh>bA@iZYK@QiFg3LAwa4-@M)M*TdMn$rH#+@8q%P9E2@P`h?(1-1PQ}v44?Noip`kUNbA-h^!n6rN=K?x|X=vDF2d}caJ!doL)&{Wy2O(m(Vm4Jq5 z!Grpi6zbSq@ob_f+}JoFevq-_CWVmLEqP0VMlR-F2uu?@~s@zD#jTmr@S)dwGHaiytC*n zVTWLnL=4w_q`A-l9f6ERX|w-Nn2nj%OeW}YV(l~~pZgjb{V{;m0a083TX9TzL1=Z9 zo8FSC4RKDSD1q+jiwok>EG-+nEj7?a?8P9~s{OSZ@z9Y$u7HnD(giOIBF~s39{A1V zSX-vZ2f>Yl5-wzc%MEdQJOIiAK~zHhE4?PIWBar3k-&==Q)!$C*RUch$ODHo{YH**;Z5b-^vxXomjag_@9 z{>D|k%-_F633btzn7>P(Hs9`x$2(7tv86C%? 
zOtzy`u5I=)L6&EGPd~h4r+12fyB$Tb&EZA8q2P*&T9JR+s$AMn>1vp;Y+@PV^jVo< z+mT(IV6o`nhu1;rMYdb-$_KQ^XMtgtQLlCPmFOwzup&5N-3{+wL-Y@tHorB?>^%>4 zIV{(Sq2q+o1RJ5#Lw#aU;PJ+A`N?fS08Bw+$+Fb3zQ*?r4PRgChc=lnS|Gp46|4?O zv)<*<`d@Pw=X4|ZgLu464akL`5qJrM(TZpj8`B!pVC+T$Te*>i+E}q&3A9Z%upSX zMqUM1OD03b)n5N3?5(bM@bNplhTH4i{dn}mtFlN-Y>6Oo@+x!Y`r0uZw zD~`QC4lL93oPz4R{aS~Uy1#=%u%HV$Y61hn(PP58|Et4VRqa#NU#ep)QP+tUw>Q@@@bZ z9&Wf4OUf?je1F9>LK-1`R?IadQzgh;);DsQQxcAv;3<$4^qrTkH_vlpS%|r)Y;%cx zjjU4vl0tzc+td+jetHZ{pa`@T>j6+u!y%yPKN(A}3G+j&yRm);-@bvaPC~A2zSjq^&eD=92xUN?5|RS6Z`AiRV&nNca;{u%Eeg)@ znr4<3N|1W0R|Q${W^$`Z&`!~r`vhA*^}T07Xq-m;L=5$ZU+WTj=Q~gCMu+1D#s95X zSHpoN1h=Zt`@#z`09gLll5Qz4^j4=BR9Pe9m_1BWO>i)AAh z@BdN#t}f?VbimdVwGdKuOkJG{O`-$88r~IYN}Lf-u1x#B`Mca(?vcCC==cSUM+@hA z6tX<^{^g2SIpsHdy%}eq7vD~1Gg}00hn!5VWK!?TSpkA4U_?FA|5%oL#Aq^k3LiMe zxQp6k9mg55acI$B-I&uOXZ;w6HWuZ|1KA`aTW8g+dfZ`7U$pTC+ZT>ufH?qLtAN0c zjA0S)5H{5aC{4r^d1Sidaj&)asi#^4w@KBRd`IEc@>ZvS9&>IP5Y@gWpi$>0+ht3w z;_`atAx>>vfZD2ZN3`~pZ~jlOL|ktkrzEG-hN&s%?9m;20|U!#zMW6l6HQ5K{>uZg zgsvSOP%zPyAI*ezSd%$tGH&DS#xk~vpB|iRLQr_49~#`Hk+KM1xX5m+)>Sc=oh5^} zQLCamX@>)hqSO`0Jay`_b((;^0rT|Y)3VuEuyNB^)+Ok&%KsIzb$+-OI_uxe}#0XUR7nuMF#6v1PUt%bIA*c~%H$RAHblmS@0>6Y%Le0+{YZC1~L4ANC$iBZT z$3+@T=$B$k;|U1fpu6H%aV#UhZFH^wli)`Uje(hXtWlfILRvFhJ1LejAoIQ$!)jnG zGU(}zRkqxHMrI`r+Ov_2w?D`-SGY3`>_dwWNGQZZsL4ju&ro)h%f;9u7^4o*fExl1 z5Q@k5Nv2>|H54oh5f%8CEXp#t4nSfLlr*k5+&UaWb=Q}g|QD+0Q=%AXNMVt}`hB(uQ2 z#X?%RQ^n}xff%~-k&;;Wp|NtW{#u0&_kw^X_ou27RiJrdb1?x4YF`Nbde4=Sfc{hI z=9}?Y)41rjewAdM%TVT|Nn7CNvZL_&$Z3YU%wg#|%te(#ba}jx!6a$OelO&Y{yA!2 zokC4eL6pghN@KyFtP6lU!%*TbkHCRoqVP%hstAU^=VUkI>KJ!utBgbElo!L*2NV3p zI}?1s=3YPl+}HQe#Qviv5u8t-CblaB5zod=%-`08=oJmjOVL2E>lI}uKBB(AW$Ycz zvmFpQ00?I~$0x2zI4KATgf|hl#k#jZB8dukX~sj-pwneI2GnV)B_L%u;ju-%lOG8u z1mX9nVW`QG^Q;P*aA7$T)1hBIzOX6wr8Lx`)v~>?c+}avrna^(Y*;}1(9!N>(!R>P z?eweIY>YrMYnhQ$G!wsfvhK$K-;qWu&+HPB>V3sDLU1(zh7-n8JNbbJqXA73wRYrc;n#M^yJZ<=Gk;T`=(J9^(#S-^!9_R@S`j z$oJ5A#H)1^l6L3cduhH4w6O)_ZQie}P=UDBW$JQQ+6eux_(bsnQ~g~}RECz;8C{xZ 
zn*!hq3z)<{S-M+H+k3ie(mHCQ(PV>h4$$ZnVW^733)nfzTWjKYwo;2Ro_+gnyQmr7 z?O)A{E`6qYL6Nk_aYt#$gb0D{Y_*o>OpbZ=GqOxpsfrMDS&LL=-bGlW4iAMk(iHoK zz7JJz-4QL`3c{e?tM|qC4lm6jg!SkqAxau4#N1i5fUA zK=@q^_57Tyh3h=TIA1Gj?EJ0zS=CQk5$54Qql++bR#@m^e|+GbH-Sb*!L)O#TSCU1 zfkH=CwTNY;ppQ28Hhyg@eO5Azz!0AB(%qkY!y?rNAa09CZ}qPWH+@tF>sdf@d+l-hp+gZ3QwWo(y^VbJwoCr{=lU9w z{6ybcZbxj!D^D%}MlptEyZH9J8IAYs`VAUyaS1+?Ee%(o3hzNOmGKNx2cqqQYLKp&4`B z!wya6DStG`Q7HpAX?(~T>J-9HO74jpPBfc>VAgxeau|OQ8!#9jwXC8_mb6kn`RiP` zdjLnEXmS2qv342l{77?pv|0jfJk7Zel?8#?zKN$YVlLl8k<8~jhsh7FRN0ptF9ie_ zW6U_Jg?w(Y^ZwJ)SE8%WOmA|@;lc$BDX{*MaLwx-YL)#$`=Jq`Vs7fGJj}M)VeLVW zE=cv66UAix)n{BPS(|EDm)(;Ub9khN3t*tq=P%P_2J@{V^(7+zFMGofoJh6^weT6| z_QJ+3a@Y9YqeHZs&y{ifpU3j!<}zz7d*{WG5eab7^AGCoE}6ek^BJ|Nx|CeBX8+4O zdyimFFl=RkLW6eHYUECt-FWz|XzT;Ec7mEvKYKiU9oDTip!3q#g}YyHTmzCxdivse z2tt&H&mC5n0lX~+<*%uwJSpsX_#)Ozn*1ffSR)JA2C_D3y>Y0?jm>XveG`iL!}9Be zq+YiNUQmGza1SN@H{Uv6-ZLLTYC%t~GV&7(UuRZM=ZtC|b4 z3i8?4uk1eRAz#M(o4E}yq*eh>hL96Rs|D2_-fr&qe~yAbY8=nWtBlrxk{qA0Ln~z- z3d0Pt#-Uk5;fR+ki0fks+Z;0@R?4isEtt$bms1wy6wn@@ZaE`{tE7wI~qsv*JE|WW(xOT zg?Sn~^Hte7{ZbvNdcO)Q_c_^|zeq6VJe|yH3yeFlLt90}f){11xCmCcZ@tFf7rWJf z)^1HTbF-lPcwA^(N#3C7CDM@&qZ=ZYFP(^SkN0UshOqRV!=Wz7*4rs-Ja&FVr|zqb zgTf5FhK3c(RZo@2QdXgOGz)|`8LnpeehJDpL>EwR^b=O| zN3RZ+s~9+s?<%!*-zEUaNv0 z3KAY`{R!dZx-cH4iq_m<4`jf^Y1RlOSWdYpJ6RYrW7u{)#zSyUbt;yi{$}pDD>Udm=sdt2$P9$QH&^# zf^%qNeL$c;%=GNv$uwhZ4n<9q{neby#qD;59~o-22n<=6oj!ELIi)c9bD4)X>)6j+ z8P)IfcAB;O|4MAqlijgG3)EK5Fvr`h<4z9OOL!9F`P)p zyikJj7dn?wR4M2>XVkQO+|)Z@xsEhm>+2 zpJ}46U5@M}q(?}oE;N)@Wa@1RNH`9Gim_C##WHyi8o zmjCW++{0vrZdcY$#ShTN2hHwZyOLq5;p`~Y^#e=l^zoY383MV_?eIGT3yb9x7K(HE z!0vw;lx033Uh-NvQ=I8Vmw6)j#Usw*u*3S~+Wet98f%{RI^|7*l~Cgu<=bd%cwiNjVV|Nlx3$r1cSeDaGmH^g%XXL8FXYGWb(}`iXWR?p1$&~Z~=R~cy^Yt z=)HK3l!u2&G*$emv(^NpWH^?8fhj@h$m%XIx59g?NB6OgRE$EC&7l|K#=z&F9hpdv z^uFtDPdwkRa@cy{%qB919MFS`cE}`3?PhX){0pkp@y85a_#9Xx4(c${X*qPLp|Ju`0CVhyi_>0a`-L^z^0N zO)KeJ*`9$C5830?>vK2d;`iD{79wk<=mS~ z|1N-Q_%QUp3Gkho{0put&Q7@2C}6}ud2T$rdRgz^U<(|XigD)Om<;-Tm%1LrpmLNA 
z{1e6;B~%h*w-IbN)#!^#sQCjn0mX{K>;GDPf!{GQ2?yljsu9Kp)*B#wa!Sb3Qfc`a zCa<<8ZD4=Gbl7SY-7%7`|6##n*rvET=IGFH8_zj{PSpWa*YUaoQ6{K*sXuEGrjgbyVzdt8+Y3%SBf|^k8kABcs z-`hFu`!0%h8+2HM=>2o`Hg9c%!XSJqL~&V;;#J;YZ?eY4`TDBzLSq!)Z8K4-xOvTO zO5A|;&9Wi7*+hAS`g6VtU$f8_E2X^EyC4mkgbiT8S4YjD9FwvpV!fUGB^^wu_|4@D zP6u$T+H*@Zd?x!0PWLqZX_a~kRDP{rs$B$j`AL0UAF@9?%E4VW&6su%QCh@7x+1_+ zVvRx0{DHEGMSp??Sc>r2r24eMFOuI%uC`uKkq~_FCIF!zi(2=#=;F&!odQqn+ne!o zY;<0~#7@*5OzNf3R1hZ4$-%Sa>SEZ7pYFQOqJak6CXi%xy!!}SvD<;RZz`&Z-{VZX z;JjGs)n+Mu>Pyiw%tGd)xI{W4+s25O=LQh%PfoT+a|nc|T=p`M?#?G)Oc`Hyvb?LX zc4OFJ%#ONot?!1|0xVxAHVL`Qo{&lp*jCq|h!0ADF1M3zt zTp2J!_I=!Q5kIkvu8;147}mKe>{urr=boy3674Yf(hw$iY%pP5KWgcxhdr}^9@uls zb|1(!7?^@urBbQtx-9>D97hHyzgHU{$2UhWF-& z8}GsI1)4267B&cpD0N1$E>QXDdJT0s3G@SG#Tvi~DJ-C{zjsC`21UOufC-C1(XX5` z`8GtSz)}xi?(K-VpU>L4<$;B9jfAO^U5m9mHj`RqlEE(hlW_bPVa7Aekn!5ipayPy zBB6HhueY1(%)Xvhp}VhD(#WCKis?T1asIfQL)PaKoqqWee3_K@R+jHnRg<%=l^F=f z;t3yY0Dnt0AFE*Kcap`Jefl-ibnj?6h|qnQ5qYU^ysrE^_Q`8HhB}?WzP$J^KzLJW z_*sbZVNNc>I6M+aH42RSU#UtEycpWeIsfT~VuD{Q+8}Uqgzl5@Zy*MeFTudUaGib; z?k(EK+t#eBF=j|eW*KDA(2dH0^>be{TuC23yA^BjGY&O;Y=#Vvu{lSgtR6P_o+_8*69SShgLl8<2HU$5-!W9e{ETQSu zO>M>tanFcoylIbmDGs-kV)^Lnv+kU*#=#}20VVRI@<6!0c|lh;?ll?qh_08=E!0`h z`2&@xch-K<+LiM!USK?@!JRLhIuqBVmYxpZ0=DnS%=82F;Y)?d&+{^4i;PcxD!|T4 zYn@AGi6@6R)1RppT|*=1HIIFd(wna(eNQIb(&)eF*;`QSc=|Y+eT_$|vDRc{wCF~T z7`7~I{r)MT?&Mvu;xK)yY%B_q65e7qOWL4%3LloeYX+KECY`YB2?(=GHoL#W;J+0S zcz1Z?x5&ZX$I-W&2Pi&2V$Zqm?jLDw+Vg!b=0J1DNPf@~GDau0^Ic7RO~`p;UpwHy zmAX)Gh9ql_?0v>a4m;gIi=1Ouvm~WA92t8Kx-X%5Y^4H@mUjTUUXwX3h^o~d(4S!O zQnuZIu5_)1v!!s{&MCQt5W$_b0FmwMp{9KmhmW@4gzDp6OUAwYKLrDsA&0DT*Z-Y+ zRgsuWQYFk{(qa9H-d0XI>EP!9zcULnPwXUJ7reZ(e@n2wDn2KBW;1eD*j~_FGp0w-@g_g ze`W=Vv0gqWShLV;LLQb=jw)DX>#TX5Y=3~ywAbR%WErRjzsZd8Oi$Fa4P8}imKx=Y#^9J=nk3J-?V=NWdf_Pd3(0&2=RP!<5Sdh%Qt+5p zjq@c`R_&OfVx+H1+1%%4+YbF1e36<6uirxMkH&>!NqgT+9}9YaZ{(k$<#yakK6_c% zejs!fHC;)|Gy#{pNL!3&CslM`VRXA_()bIE8&Fa!_aw;%7eENp(>DO)t9W0=eCCX&ov33XUT7bx-wBz;5&>kl 
zjyKJqEvAkuF1`BmPje-`8pCAY#vEu_CY@AsD;8|4tL^5Xy{HW_`Q>2cAY!@q2@ak! z3%;3e7jAU8k#M$Rc=qV&qZ`#S=B8&*sEZ@j&Nltl?mxaQ+y9u=Jz_}+B=e^inBS&Y znQi-z-ku8Vaf)0Z;Uei_k0x1XcaeVRSQj^c{~MXV^KagVk9z9oRE@+mC6w6Vw!LcY z%<;n#?-K_ZPSg7&*#3RExP9t}3ojIN6!MNEg;JK-F6K4=)81v^lWis0#uJ$HtbX&csj4Q9&GZ*%z3^x(oVy!VJX|p65L%= zzsQJMq7cMdf0{>&?GI$(z({GZZ`<~R@i@Wds4dd-Fn1RN{ zuN9#uTXGUk+@uAj?I0*)Bp;_`AXK9f!&yirX%PR&#Mw@CgaZ$QWo234mkW zPBt%36gg>ky&W?V)8?^jVSd`7ICSSSBq;==7T`!7yf&GlT!P76KVAU1{9ML0NFx^l zz0@aaf7!OQh1qkrFRcMPRxmCTw(3YHNROF~54BG;CqP3p=V} zOtcXnpC~of@}z}I3hmiYTy2Aj=+15NLPntnoImHNAVXE$h1j z*l4zr1ff!eCh+@J#Zo8j+8WQZ(#)po7pitVxCk_VXQyTOGnVcWb%kv(W(c#U-X{u$ ze_#ooU_{1s8`a15h~+D^M^BFwFG$wMW3Pqn%vqbyB2)P!y8&7QAw^~nT@l_yZUu)L zy%jrj=p_3p+qny-2@=)2o8ywqjk!NkIrXESk4Az2*&6oz7csPI%F?>={Exz6zG@4b z_+ko&k6;!+c9{2!mZu430%D8&)d_l6KUi+ZGOn-YX8;*vSS3&{Tp3uKdiYA;28YeV zB$Bl-9S7^*TK|opt-38UY>|7&E^-{!uJyQ*hFIg-9X@%(``J1<*We9Lpe+PDv?Iq` zC%2*XIx^eb3y9ri#+w!rjx}?R4o3U;;|2?GL$9;GK=zK(lwSnSEB_XVg|2UfZbsUw zHwz)g{qO4!zJDJR({_dYQ1npCZ_}9;n+O>Z+$}x<-j*GJPZS{!6zS@xOtbpULRas$ zZglS^c4cT{93w!=-t`#EkY>jxcHQ&9ctQl5DajYr?td&c3vtr<5-sT5Tz7eXX^!c{ z6FtT}cq6T9&k16m5~d zFi*evlX-ocpC0K@7{proZKq=-lLR?apCdAi7_O6&Vb@)$(t}I(*QgZdN93$PuM*?Z z-dvM&NqTzJu>X&yH;;#U{ojD~bxuyHoED)lbyQB-l4Lihs*XG1wFi?El6&`UU<;7+dPDkiqA7iL$kJd2 zo2Yko9Cge)bK}t~ILu8tJzI`VK_pas#C*@r{}(DW6D#2!$pSJyUYk4nP&>sJ*yod% z3~UX+((b8=IIO#k@z z*HtQMrc42o3_)HJ#QX7lgBX*sgBuMxX<&thJQu=WV0b5OpL-#<&G8Iq$E;d@2S#ym z?QPDNcnz4S?%YkY8ELU zCcooMt*kbhtr0(>?usWw(CHE5ez&9~1EqQMnAx5pbpDnbUEkLf7#!3)lJ%F+3XJOU zjP;MmfIk+#AUhY#`a+2H?rwT^v9Wh2;P~5n zOR=V@vFy}p7WcHzg&7>`p+66`1dME5cpeqjph2DzmHVoEc?H0SXvlTdv(s{=ST{Qh z8DXoG0YV5(iEqj4%0Ys1M`dl8-xrhORC(AsZJM>@$-{ezc)UP&UdklLE8gf7{JD;% zD^{uck>nq1XDJXrohDyJD1;#PK-ZiU=U4F713fHBgbRZzwO`G4e6o&9%kfp3aX8O@ z8eZ^g7vowBD3o@ME5A`E+6R#P!8zrl8VOtn=+?@-?{7mS-H>Kic{*hgB6`|p8Z1OxkKbt#S}ZMiMWLyxZ4~><*c^iib~7J z6VOF|}+D1D^EcRFdNiy*&QnivRZ06ydHeo8@h zn}h(9!BYR=lwO7`5*Uqt2VNyv7{&~K5~6F8eQ;XR)$i`Bd7JEdkH2c3aB^^;gr2Mm 
zL!8HY%cb|f`R>p4crd31CCPpF=I2ly3nW-GD-KU0Oc6zyBx5d|C=OAQ+Q%rZW}rIB z9O^oSLr+H=6cB7gnv73?KU<-6Uy|?i)#Ad{fkY=^5_LW0>y=n3c>v3iZ4evBr|55` z=E|%6C-+CDV7h9wEG(sqaqi*@gJbnY+Re1xJjC2WnD8S*`2naOWNDuagMUm?l%7n5 z2Q@arp$-xKL?)h3BQkyKX14ADEz-p*1gQZofIf(Y#N=hY801DyYW!=NpzX6n4=fL{ z=u+0AQYvcuqVz&ZXW$zwL2{NM&0eRh8_U+829VuLWo0*H@HX<#P71}PtdMMi|h+fc!|SL3!sJTZ(w`Ke=LW2~_x)?Hj?1n*L;cxZ&xw7cv1w>`+XR{bPZR=g_&8 zl}eZ>?WKb$6@q+||KvIdo`2GM z{NR#IKso#;w;qjAm~N~?DlQbBtCAIlK&#$mm?TLe5Czbe#l{j7##kSEYrb>RGl-uJ z3+CWIMXuZ05EFXP7eNM^q9rqu!sI@aY0T5SWQ-j+5{RSy5#SNhY(P*~rDudirlq!~ z?}t+m%=6m&OPsa(4<|*enX{Gv4(hGYfg1ZyZYHkVNqcQdg&X@Ksu)1k?d4Dx!4{Es zE{-^x;W%7?3z=Cn!>)WL@OX*!N zn}eEY817r5EY5N~-KfFpkSr85*n-KuOk}2Re2#nLv)9Mo2TBvDPPj+xT&PwdotYe( z<2~T?uh{@vB#v7K@|lC^_xpq#3eqdu-DG<~_5Yl#oKP6PH`aeI;(F@AiZwb9(4~*( zFGt>gWKijcZ62g)|Jg)vL=b_vGvTeTFJY?B1fY`!gD-R9#59nE0TljR zE7LbRn?=&ATEMKxM=NhubSG3_0YPIfT~lXiaB8?F*%&`At-Dz%yY<%@4gZcpJ?=M|DP3xI^Z!#qdVp% zCVGa4sBf7E?#gbjNHwX=V2BFn*125uxLgP2+CA%0fvb50*78FzCv+5nSThKU1X$pjo$ty*e&_wAZ+HretTTw>L2fw zL>_xi7!~a>wcdC4?8>E_PWAgM5z<{o@NXZBuI*kUt3g-Cp7ZiLu@@&KF-y_jge64v z=)Z_1hrL<~Htke(*m%uM8yFQ|1mF|7vHLY#cDF%tLi>{QQYFF5%U2w2ZIh1C%nnNJ zMw_HPGK;Jij|AP*7Bo)38eUEg!$!!4O$3%+o^QSUsz%EVB;Hc+NZpFZ{FQ>a5cA^F zI%9qf5n4Asx(sL)@gxN?Xwqq=U`r?NR}ov9Agid4JIl8$>#vq_YbOVP4N~-oZ+h-voxg#)vP_la&!*9VIo1C#HhqE1_`Myk5Hdy~s_UJGdvq^=XOzmLDZ zoT?f{ZvgrN%V9QhsDUoN_{H}^=V3GZr;g&1l}jc8DtD`s!8SQxDt56(kpWpvuRtD< zz?YvtA6L$A37Qap-eFy0J!xG)2AD>Q$7^jH0ep*a^5LuR(&k?;-O^o>6H+P_pmY4e zw;Fbq{%-d@^LIxhX%nYM zsic%xO1>(Kb9|uT@0g34>WZM8+kQWLoq1ohrq&;L*ty8D9H6h{={BXiObp}b2T>g2 z$!smQEpd0y_|m0uc1Yde>Ppjkgd`Sx82D^Zi$#aH)|(q{;&gDI>Ef8zbs^jz9oMzE zv#*O9gZ7l93t}(GvbP;;-h|}lDEBGlZGr9)Cu-o$5fXdc)Og)(XqU0X0mLOgiV+s5 zOsCyGB`8S=GXRJC?(5>lobnLJM8$FAhF|_Jw>K|?YjPhqQT1HbiyK7l{O15O zSym;o`p6!zeZx+dN}2L%%m?Vt;@D-_jyOM{G5xjd&!UuW=guTd0OrKr?SgXq*1VC( z>Nu~YO6ut!YZEp+=|O`fFV%DOo$egfucvn^X2h3Y1U@AWWit1;z7)Gx6=T6aJ}n=V zA^0UXX92KKji-)Ou6ItX7@tLvAjDkA!myo-2*xF0DXwP?J0;w>Rm1_Qj^qNLhC64S*sB!qtyp1k5tkoo 
zy`%kr$@0KZT*qI>wIG7&Mj!gHV*6a$f6nHYc1jKb1Bq)yZmqF2dxIR+Pi>}BBdPde z$cZnW&7Kqk>Tfbh^nY^pzho;!n#zf<1ZJNZaeib9`I6IX_(l~sAMG&?(QB<(sqVA; zS&9bSvq8LOPM5~lO%Q{1f z3(Rm#ZkX=Dn$ycl3RJ*AMMu!3x28PqG70n-h+=9C{ft|;rds+LQ4B=?W4^|@guOs>rv!2Fn>pif2{?S{|U5TpK+8~G+g43>=4c^vPz_WaJ;hb5?4@+aig6SonF$-xAVlFi2j z<%4G#%8XU&yG(z9@fHV^-=dNs&q^?MkG*0n`>6%hzh4zvma!DDmI^M$3 z$&urTcUONLmd@8K-{&wi3$PYZK3{R?-p!#FfZ4hGfuXrPyon(qqI3~*B%~QTnzoK1 zONIbkq#tx4D>!=H(AdB=D7t)!H7M&XXF4;Ah89|li`fn0SB959VQ!%dr7|q}2`3ZZ zOj5_}<5sLzEqE2)&(dVb4m`uJV+KrRW-_eK41R+Mb&;o8{afv)6NwN|;b)s;t*{9~ znJL%3OGCk)rn{;rF$3IpsGd>8XMeCQZ@RECWW{MG--oKO+2vTMqkdg1dAw@WJusy+ zrfcjtsfvzmNGcK{wunw=CwS!=sR5zI?Qz6UI~7BZa)XPA-JO$({(cnFgUhVWOH`GiLNGWDk)NrKIt8iUt`oO#Ha?(Fm6a>BGwGyu6Cc(H> zWK*toFh7_((LG=us5PNM7OxX~-es~B7LWKv>ALuP8b?$dHoIi?%!`~Crn?5`bYCdx zJNtW?RKBM&x;>_g%*nQOM6U~^2>=(N=YAeVtB}bo&N5wK4D1_kS=ts;=5my~j)L7H z$}hMQazjb@R20Ork2r4awA?I?1{EI+YQ@-kh3!teXhrE+iCblPlfVRkoZCjt>#Guk8FW0~ac0T_m!Qwjzs_Flvkjb16O+6@W? zAZUG$xeGln;R1mQ;v@u=1`K@yQN6sFGsBpWccs_L5HoQ%TICa79qmr7H@33r&P$Bf zDloC7?u+7IQIA3)#>QlLNnnSu=zDfZMB_2Z4jl|@$fgLsJHR%J(`+L zD(2zEC6}=HdnZ3?P@5Do3&)6r8Pltx){^6$kpq6Y-xCBFBTvVWC$ka89n*$x&Ek=* z6C70^`}(~*Xmg#Y!o!W6<>{)XEOmt+hTxS??}q9y;5D}b+}3o2bi4nwLCOIK`!3XW zsng6>f7p8yiDBdV^+J$`$>n3Bu=azb`GDc)^H?)en|?CzTxLyzFm za$f~mS*TuBDTg#6ZZmILNFd$irhTI(XCg;YTUwQhok03ec-X3*?tYKHj0I3)7S!GH zfV2L~jJ5M$6Btg;6E_9YBv=1;U)>P$OI5>>b>NTe2H8D{!=}mdd-4m1xDw>n+V@2^ zqE!~&(VAd`KP#>gLL#lenMoVTSvX?V0^+li8OS^A?kisLJ0Pu%>V0&DpG`Z!Q|OIS z4cfKeexk^VI40_e-!l!p5ys|X2*VOlE>%iq1u+F z7c~vbA%(5q(b;WqK{Nehu3JwKGHF8H2i?x%z^cTK>z)*gURhCDYP;WVfa@OGtp`#+ zTr;~b7i#Yebs*|Q)ITFU<3jS^_y%uzhvM+vSDW%iNOb~~IK5+rJf{QSA}^}Q9zLKA zG=t?DSqxpg`UWA`S2b5vHD3&u-uT@J$h4Nt}pc1k|^ctJ$R&%!%7Eo8j z;yo$1cQdR$!FJ27ft5gQx`N!V`_J2;Ht*FD_|8(1wI|X*2S!9LFJ=ARb1e5kuaYZM zvq-b;jPzL2zb_Zc%1XwX{;&7nUpL1 zbTlB12YXUWez_88EO>%c3-mmWdsFQ}EFD3)<$9)*bhhg1B!Z;3^e=iB-Ry7$niNH+ znIZw^D!X>%pmCir3Wm}L3 z0Bz3Kw?tHqBl6Hncht7oT&&e`6AoWHD0uj&DPoG=!U5{7hLGt38=GO`<|B3qRoQ$2 
z)t`<`{mTkHkxy&=H7*LB@p{G2558~t3O{#rgMhuLB~a zSObn&k9R)p@VCPu(;}@k1Ns+$-B4)qH+?D7Ozzx|L9>Voqp0lXo;wRInDQX1tdToV z4TV;X*+j){mut6H@cVI+0#;e*xV&lPuE1W2?v(qvD^MX^Aa-X@DFv!yJIqpb4)n*H z$gX?3X)E9Et@eEFp5A|~Tw8YQas8A0E#$W|xypSF zCYN^ShS+;I30@_pC6oHqzAzhr%a~HcXPMC=Fgr1RJM}1gW8Qq6Kaum1HBsVA&<=uk z@WO5f#R}iOO)_zUo77AP5(aeMeQ6Dkj@lKsqxT>PS+d+#qG1Gi(-8+%QPF#m?z*U?+fGe!CR17EXN z$5^%UNLBA9aV&@1vZ8)=wu0OWI(aDSKBphc4tROFXS`O&6 zZF?7}x;~$EcM1s=9Ah}O)H>;ID2z-bTwp)defbp835Gc%X`3nY>DS!2yjyX*rMwBL zu$}7czP0&7KImba*Q9tv0VcZo;&<6>8>?(`9S#+(WtJ|2n2wN)P}wDPcMLF;m?i;fs89 zCHUgA3-E(U+wh}M7uJ^iM-%e*c%jp_CNM^>B;vt7FVR}7ubgDGbi*7?`6V%FJEabL zTH%j=!_5T|m#21s)If%yLO`CvzV_}k>p4v80`3M`WwNWB2Ew|gmE>}8wTD!F1oHxJ z!u3B^db3PnQR>ZzEQ1~|>4jhwuY;+CfFPyaw6C%}leGj+KG$MqLXvNEMqK<5cH(2@z40`H2rttWM7>R4P#RNN4T_A{o4Zy0eV z>d_qh1!+ONlYK6n@N^-lyjB9O$6GR`;w!Az=5EXB*8Ac+|33U`^b}h5>+B zQ9=8dzH5(i)Vb_zKsko!_Fr$+-?@Y5Mp98u=|&1mz~C1E@HOyDOdKJVa~s{$yM2Kh zUUAbk@ka8mJ;>}Zlw#G9Uw*yYaH$YjZLr(g18EM?|6peA1l zlUUEE#q`_2ku~^4d1ym*Qrz?WN>%gWo0R z(*Fn_3vWx@N#nz=U6Z2JO@BF<>kY1Kg#P$#Rim1srOx+}!uzql=gui<#7Yu#Xg8OJ z)@DeNBBJEfHPSEM1=@ex=(o_R`S|LI%;*mk@?RU4*(l1WGZ279R?4=v1QW8xgpt`<88+} z+fO9cKM$Q52{+#2eZ$F8eHD4(z7#;4E#8kC?DjoVldsY)k&9vRWswO^8d5hNvP;jF zO0P?*xWF64d`r9@Y52j)oNyLzn?2*Y!F?@fO+$fR628&wJGvk8j4b)>a=d?9RikB} zatbk{B|e3C#G|a`^l|`}6u#^%A>I%vAlmZ}+Uf(gF*MeA25p)rxB^$Cv;eR&;O?-l z@za+2<*p7ce@WLzwl^p|pDA zLG1SX>D_vl!^cV5&o-q5RW6N<=`F^;PWk3w@A*LOk2^BFoTd_*vuL(pHXn5#by@lc zx0syTUygqDdoLOt?FemnKj0L_NG=>+9k~AurF2bfY3@*$Rnzs?FW&++pU9Vh>36)U z(t1x&L!5Q=>1%#Vp;pL`i%0{)Y~{S!srliuw(=IsQT4`9YcsFD*Sj@ z6R~+Y)PIQhN9y4$85MbL-u5qC+s4yaYzR+DGoFd)u-5Wtc6pR1BT!pYIxNeM^r1b- z<+J#mfuVGe%6cTe^<2#PPTAFC9YA#5GE@Ga90^V}{cY%5KJCuC@-mfFu~0|xo-XZp zr@`Bq$i~nUh~2z|OZANaD#d+f~I?00r z?_*@9mf72qwm|O9CMm0gUqw-N+>u8rIesrZyn^aKC*N1=QCQei2As_sq&sVTMA&Pv z)sXarIJS*vMy?GFuiS^2AODjTZnhbn8TG)sM2*-gwU_J;OM*FMt|gscIn}}y(`KX(7OsM|D@)saN>(oy1;%Y zu-oT(_~>fx-7@G^mtcXrOAs|o3!R=_oL&~>=v2P31l?Rlhs@Ov?a``Juj8XP{O;2U zm4$*S(8APH=2GjmKhuYqQ&b*>|`pfbnQY?Kcn`;BqO7#2*vA<%) 
z<(0YB31z|}aR}H*W#%WcCg^;;@V4xJE5w}{1gojlb$KqIFJrX+`A?3W&qShpbxQWG zM)ACFzVpJ^6 z)+jD5vpV`6CePZH_lPkZ$?)#Px^orj8D%1BfxV#? zlt(;fg~c)As||vZduL&?x=kt2R`3*Ju}+w-mw=N)r>o zH7Zn4Ubi5!6PIMXLa`!Su=g3U_RiK)z((arGJn#2y2Mm{uqdn9N)+|Ic%^I z8++quW6#EIK<-Gg1atZ?iLz4I#2(I>cAm~M!ix$-4Pk8&@Lj^f%w<61(8^sONmY0&gBZmDH_PG@YR1VJ}7qn1o`l`atTV<#FFOQU#)QG1V&v}Z`FC0{U?0+azSVyqxAjPVTHGNO{p2xabGL#+HR6)ofWPbGV&No~qQ_r@k6 zLLbqoB0k}fx*N(F#<(H>?hW}j>dcDG+GK}bUJ$fK0~|l1N`uh&whi; z`&(C_&y@;N3`7N1Ox_d7vnvK!NUnobl(1^Z7{5oV(L(!St>rOa9o$U3EtKKU^L|@? zdyN@6;1rsE2o$di90n2Wz?I8zC+VNn~IpS8DN{06|hy$tA*+Ccvu1w;Q zgVT{E^CErC?!b7b#kZ#AdRqdNgyjanPSq8FOA9I-52DcLAeX$l3oe6uiRm4L0ngsv z&V4K`t9UIgo6+=o%gU>JM%33yfzpss_^@=7sE8Zr(l$ua64`okdrJMIW+%Nv%wvRv zTc+waGq?BNutWN|XXO{7+og9|1fuUFfvNgcCBx2(J^vbd_~16TLSw+z;qR{L>JvdI zr8^yymv_G%5YmgjV6p>metMUAwwoaSY`F0TOpMS5RqF^y%k~zb;reN2#YLzeZfopdY;LnTj_s3XMqH> z2b<6Q2yncOFIAB8R;57?VgZA1>vh^NL<;V_davuBLu*Is!Y3i3W9ldO>T_fFLIG8m zpao2L%M>)Sp)KyZMia;sk6UxsqV8eIP7ruvgFE6qmK7onfgHlm9jpiBLr3ytpNbh| zcEhLnP1Hl_MH*74LZyav;M_m*iXSyeJ}J$zttmxhLGPt_FH@=2H9oDhBzlBEpV3-H zpyq=wWwFJ0-s{0lMm2w#I_zL zx!2%Ovf!9qv=A4Um&J{gd26s?f28%*{TvRw%hET9Y6;~>fsMJhnGe3mSek!imSOh9 z!L>$LmVgTDY<9V8;zDwBL&OLR|L4m3*FRqt@If6D4?l3qymG?!wIS@pLY0uKRiwN6 zn8?MJGEqc_`C7U$RYm%Xcb}qFM7vjx#y>%c3i-%d+mw{{e)6w)p`%05zj8%ehSw7} zPMuwLOi*LaC*sl4tkVZCe$7=RTy(V3Q9rdgK0Ul3*sfA0GcS<^$IT{8`Q7(F?7#BW z`TdC7L}2A;F}#%C8P|-vL|rOlGC*+A`~T!rjU&>KwecjdT4MG3W8N-yvso|kuD{_9w#px^ps5$7}t`{e^_IRahJ^9Pete+ zJG2ejAcm0iOn0rh^$Ubf;6BI%`(Wo~ByZmv5FP>oh)bG-$JZKcx+`vqd+yQdTZ%|8 zTVZO?`5ySv^zSS(@+6!<&q9hW)xfOKNRIP&2tH{f1vDSp?s_RUjQ$k z?JIKREZ32Dbbx@``NON=mKLYu`_^Pg@v1F@3)cm?hbIhNJNmG#K(IuM{AZt#>S(?J ztp6D&8s7M83D2?3Rj|PWc!YS-rPqfPmOp$XtfWt%dujmr4K?z{ci}{?yvaqI-4Kgp zDwi2!6&W=&B*>|l-{aP%lvg@tgWUx0eSA!*#Pq{j5AKQD%L{XytC{=vTrZVyBzmo@$b4T>M6Qf+I2mf+BL}fA=RQNUf_Gi#@78zLONr6Ysf2+eY@sP8c~oH;Wb=h zT36M+@4AwZR7;uNvpF-T_J60piUwYApX#jduc`XH|IA-N9d+iYtutMrY89ESvqFUx zb6zfxH$h;?9s_9$bA&e&*G9-YNe4t_DBhu)*P=V$a84kUFh;MS4uiKRHqf&g(u~{u 
z&r2FVYFqxgvC8a9R6b7rNCC>vRwF&)Cnfa8C7&>S99nX_gpF;n{9#SlMw|9BvsOVY z@h#VORS{glL?6yr_Q)e0d{s5-MhqMGYC#MrE#2qM@r4Dl*Ve}{VT~cxLCgqMhVT4| z=9|cn8g6Pn@|~qlu_WyCi<@WI_mbTE;&x!*^`31CP3>a9EQsrV=)TTCFu`VN#EWmk z78w^Ck4cLr#K1=eI`6kXwbKtQVH`f9ia8!RdZk&{JkzZp+Dp(vk6P#KnoTQpUyFeE zqE2DS-_)x6|Bvs%f8s)$pPrd}f{a-QyTiaaC4TO;#GnB*8l%JCzd|t_0v){9g0cjc z3nJ^dVF+dPY&_{KhG1Zn5q}`pA%k!fG0iHOQ9}4`wVh3{eE|CAIBE&M^6gr09sjlm z9}I2C4-(10m#Z+Pt)IXtlHuACq#=VvcZ6S9F01@z@<&t!%$((ode9R8lLPO7T{sAT zOucS9$N%iFEORi#fqG~$;jyXIFLEs5f= zrNUMmhACxgmT{@R)*2aIUfW*$qO!*%E4pRtP8W&oMtbkCiu<6k<*m~jj~z1)EPY?; z<9Om|v0G$IbWB~rY)C`6SUbX)?oaz*3Q25^^>J07Hu@+D%K#=*ZHIMN6jT)Iz%OtO z0sKiXPXE%PA5FJ-X!bxak~tJuQmF*kyET4B(^_V4LjDhR@jWwX8nPId79S@>^6*{J zS(e6?qFDaf#(G3hWApGv0uW*X|KvB%_z<3n&w{sz5U$m{ z)S~GK#c|QV$c(#r!pL#H3@lyLHm*18)=)Sle}^x1QEKI2!QM zeKI6TO1NygmLkg7Uz*Yv65#O`v4k4?>0Aqq18iHTuAzfm2E7y4FyGe5WspAy(tt&v zWzvv|(7j&e9h=VU4~z?p46D@pp#ocFCw$OOE}YyJe_Z>gIFFS2Dog|UjgUJX66IDZ zsc06EbFk5iPE|j2*mS8_;`-|L>MA%gE0E!P=`}ogS{D2r)d`B97c>9+Q%I*pHmUTg z<8$Wh7h)qOwtw%=8;Rd>s-Qg`Xn}rxbnV`p=}fle%KUzAQSK7-iO|pAS<|8X6jm($XIL)#D)u+ zIWMJ7Bk@|vhgnY^9WgG2sy~BwzdJEDjx6nJJv_|bWA*yk23k2#IXXrWoH>hDiD*vx zf`6T%?e}h}M6xPQ8X6J10J{`y(F6kcp71%&ubcz3A0zN&@`54i&Si8B(RC=MttIYy zSpCG0sY-BD+XDqS1s!q09+&xPYzkui=VCdL!#^{9;0o3VJ_G)>CCXvu2;!d-;Ug}9 z;04U(rmYtb*Ljl<>B@2^0dtJTQ>B860)Nlc4y9^aLR~))FUeCuzq|6*JYdK$hJth! 
zAW`AT>5Cn-ic^*si_4ljwTZGe(O3+_c~(?(LUM>DNzx*1KMPC(^ zPh`n;QVS?+Wck97!Gpu)FPEt6z}HbE4f%jc0?4YzTunWK9jDof%^3=m!jFmmN6Mc1 zSG&CVJ7<15Y)5lTENKGPxWJ7myHIqx(g@+QLfjji{0SbYfBYDDnF9}C1*?q~Tk<8F z`Dce3jV{>%qeaj&unNBh^2}l+yXBSCPwaW5j5Wc9El33uAEeNU9=A``kI+WCKV^r< z6aN3-jEhRTUpr`vF|@yvK+Y_K4I4Kcd)?Ib?49F?1FS&!lWzes<(6s7e>Btr>~axd zVFl#@!$%^n>>qA!aYgWn#L!FnO5$_tOSiBVXu$%u#=2d36ATKkKHvdjG5?J6zuiO($~J-9PcRCFquITDn#?LWIhu$K$*_KXQ`(UaQYb;#|Qp ze&yyjW*ik7$$GY>0Y7t|_{C_vb7!+-a#3eV5P8H{R#}U5VR+08&W(5x50%Vu$^K zs!}>(_ae2g1XM|IiUGJ8YjUi+{IqqS$@u>E4;1S{nHhqhM!XM1S7ax12WPfO6Zz3p zeI?`-mwea2l5xl?WRc4(S&&H0 z%9k-x@-`l1tmZ1#LQ0Ge(~u|LpQ(C%Z-zpKg| zTiAVz4;LnYzWwJ{;j1yz??&}(d+fnx_@}avYy~McX^rTg!hC$bAW^kkvFOx7^;xe7 z-f=;M!%n;97qiSN!%iA2#X0T4r9Z}zo-G@w@|Ng{2XfNV!SU_`zfX=ohA>du2c`na zXo9ObOr2uq*cm+BPq|82U&<6G4xJcWKYc!ad6f$FG1&dM8*jSXU5Z?rVp^IT92+E3 z{G$@D`rUOtC$tqM1nEKW(pc;bsvWzMI&cPUIG~@6ltIs_R>7Cz-V^?DuLT!S zOCsO~C)C;Lh?qN{Fr6xd58jyw3=^4+J)O`WBR-T>9{-flZ+=smbS-8r9nm5M-`<7=iGK~znCW_gRI=4xusRL*=w!UTdDhGhC||P zEM+Mu&o z5IJgT)a8ZiKn12oJt?R>!JPT!z`Eh7n>jfjSzHcHAa#23EWp&)s1I#+BJSSo6WAs@ z=yK$f`rW2)EG|)YrnMES>{QVfU>tX!?<%YN^uQ9+Tj=hkN!ghq+yxQEfc>z-Gf*4A zv*`+}#WNfc-0JA8km;UcLd_)~pK)1@4eYtb?t%7*<sN=Qr}Ur04&{F<)#5NXtyOAU)Cjztr#!o8;+Es%b#|jQE$EpAPZXHV z08Ptd>Pz>ux?tCy0JU6`WS?h|9d42RD{Ukdb?;992VuzdC-o0#Pd^g$;EEY5!XZ05 z|FY2Kk^;a5A4bsK2r*lwkJGP(n}K~Uv_J@E(p3Rbse2acLvV%cC0dCH(zaINxp$Md zoKz8-bQAXmMS1^BF1%kE9GCn{4x5fV$0I}paMzajlFL$^Ha^kPHGr9gfRi|02w|Ew z@C|=23t@)H_BQ5bl%{ZW5XvgcgF8rN@Gz*o?eN>^B#&_7{#2g?Kmoh}_o;BN$X!+%A%|Q@ga|7Z zSe^AUQ3B9{oe|x?7Oi1Vfi_ndd;^X2Gor}W_*}3hpXQF}BROe=2;9F>q`QIJf39gX zL&IqXpSIRWkvbix*0H7A*43mph5a}UinWHQ>4D#?s;^p4uMS1_PkvY0L?FhjG=McP zkU6AJ=Ljq&y!FDk>ge}hJs->>$D^;S54J^!L2n%(rjGy>jtsEci)5nnD&9r3b&DG= z)LBN50!vTdM|?!Kr9r}U-v_^)kwAi0sSr-^g|?Z~N3A!=-a>(ljArD7wFU^TuuXU9 zCywK&A(iap^UMaJnRErc9?fH9Ag|>0|IX|Os}hi}RpK70!qx&-G zE8FzU&oJK7Yh--d@bS4rdLff~&Wlt$cUh&&BePFAbw-`2e6zwvR;DN@HyDNtr1k4D z76$(dIpGCdJ1Gff+KVJgVFB4!nzU!VrEB-^6T#zYj|%n8cXEK&fn;?y+96hQT49m0 
zw(~xt1Xs|)oZz@(Y-ThNcG+RxU0(gUDS;p1FC*ckkABOa)wkn6$xh)eJp4XdzB>Xh zep&cOuK!)|AJyTPcf9!Sbz0u$oQKb1y-qcNZ#Gp)uXDEX6K&^aQb#sOnIgUgow)B;`{zQ}`vBUU@TGy^) zPKxkE5IdgzG;wmTmC_rE)c&Sesjng-BF*^m(yPap95!TF4LG4tmDof{lx%+o`R~U8 zL*&{g!3x_fhPRKg)X#D)B_7 zL2YNkF6|TV{WE!)+#~?4Q1}RHo^*${Fhqn0^KcMPpMH*KkVe`Yq$ezJcOTM=vDhl# z^)F(5V-RBE7bxUgCW{^+F_w8S*lyhUhFG`zC*`sE|K0-=57YABE&s{gB+d|D3GE+< z{AM<)q^$T1gJx^7Z)duDCpJ)LuCUSPqhG)sl&8df&3*W3j=eVy#YCuHF}sS*`FADM z2i@2^>UtL#CWW2_a%Hl}xqJFz_wj~qzp5|YWhmfx0`}UPcz?Z2B zX0QI~jiT%4ISrOqkZZ|Mtlp_Fs^ZmwNV`R)v+%24Qxsm5k!ESu&fSefzIO$PsWOOH z94#;7c$A;;Oz*x7*8xVT@U0cvJ!qmPQ8V!Oc<1-)q5SkFVN^;l=W3r~sJ>pg^QWa` zk5D(YLpk9d6<=e+OnUJXG7Ba#b;)vX&`j86L3E2naz0p}eUXp8iTBxN>(oQmv_1-d zNsPFII}~5!JXrVGAUcTEUEEDnldrdKLY%_~^9=4nd+YET>~WJ6P1W*M?4kL(()vc~ zxH#&@(W7Q&Kb$li;?>C(`F3im;{okf3fe^C^aDugs}#daQ<#a{{M{D;_>F8}05q0R z7*trStJ4!W>Z3i3&e)VaTfBj2OzX#WdyfJXy zOuJ-6`{WnP3(q@Wc`Q zqWgSW2T=Ux(d=Gyi64GdV$>%haLjsKqg52j;C?)hNM-;FU5 zyT>!#{(s=^TjlPkl2F)v-zs;lsHB`%pYC#(SmkEU3rURDa+<@oPKaccJIZ0J#K>VJ z=fg0p63aQq*@j`phA}(!yZZiq|NQ>ixM$Y?2f@tD3aYRtlXI7Qy8o?aoa_ zI~1U@+bJ0r{7U1A#XniAyrLLyE_|Jvo^y1qy@RoIg{x1hp0kCw(~KjEB`ldEItw$g zHh$aYL4&%XHaCC9>w$kj-e)w=^V5yg7(Ars9`wSVD~sI(s{ynxiPWDwKxX6@(@$q~)c(S_!epl`^blLZ#7aoZc>nI4AkPzh$O73$ zXswO&P|YkiJZ0egz$=C2<1g$7)Woj_gdpL2i&N;8h-`iY&KURcdyRDeNCN$OxDCyL zoJLXWr`l2;rLAV-k-QncxOm|KT2^1ZnK5O7FOLmTPnFov$dDenP5F|L0aSSVo?obS zx8*A$N&BsaD*iBY=nbv_+$Do2wI8(?0qc(}0<#h!lv#HsX2_WkK@7qW3Ce1a*K2Uu zz7nHq2+v$L*Vt_d&1na`~E7eQ3ts9+P9(*<&B8kw2S-cm7b2UB8jijN(?ME)*Ua~Dt>&TFE!nG$t}=7CgQOGxRaERzqX$^lXB}j zZ&?~PBN<@x66=gY^S%53Ol8gODLr+vALxFk?}SJd*3pTIF_G!Rn$8D>GqW4xa3>d8 zI{XC^i6=)1d7PX&8+`UQzr>|xE#zROv$7tpM!CY&e@n}^A@=#7RZVI<^ZAN&RAh`h zb}%nD#hKuCg?Q>*=zIpubDZg`zoYY@OcSZ|;s(LJIY+r9_VC)Pct3`ms~koYOUoLg z1_sfG>%RL`b&dBH!al;7S6F({2z~`csLobzQC~NcM$-FWw>d*#=3v~YQlwP#xO2gL zbwU9ZL7JnMYw!ii_?x%>=wl4dzizYiHRBK5BUxx94e%r8L?X9;>NFLx7)Zvcc>kjs zBiwwTCCdl1nKz=0^)1>PCtb(RAT|Hjuz6QQSv>v1e!zWqLki5ohgUHH=S+i-Z5rB_ 
zTo|r?;;Y%MxM2EV+|AO+^MT6phYdR3`dYiw`*5y@k#f4_MZV%a-*Ji0Rhtv%VGaN7 z^Y0x`Rr?L!kryz5dp?IVP7Sx5+RCGZ3_i~;* zkdCSS@=&_ej6!}uu(|gGm3^U$G&oo(gR&Kp5Z+Qfx18m8<-r-zLD&u1 zmQ~#pRwew_h%-TOPIeUgxG^Gp%&fMzgB4c?KQ$Em{ix4|kYO0|5@IPUK)n^II`S$w zY;)4^PAgdq29T#iKJ!U0WcIS-`2Mj!C9lGS%yAaOGcs4`m3w$Ssq7A+du~`al1IIK z+(L?QR&6TY!>+zTgtjWYdBm-8pF|*=LX+Uh zaoM#4J(E4QLzcLA{sxNFIb2kPp{C+O6Xe_$>hVv64^`QT$17W&a7tYZ=%7BTbJp5S zT|usRY3&jgvAwjMSwkZrW{@md)I@^_JUUqNl;|0>$1yNp}szbw_6x~AycHU zrAa<4(@c~WYwap^DFhc8+j`?ARRXy1WkHFlAK@!onQ_duT)7;GTj*6 zz6L`q|8j-w8uEh#L1*0r+VS`VQz2|(w*`;dlV^2T-Hr34> zI%jkiVezco+Jt2TTvyE-sq9MSCVQEmguhZLA;S_(O{{6Lw_qPnP=Ar2l^!REzYzkX zz;U9%-07?TU1f_eqy}Tyt$GhC6kA2ak6d{wN(NEFppx={Z<%^^BRaO(=XSo8GgK=9 zP}p7soxr~r6wMb!6B`NHgzZ>jixWnE;cly+hX$SO1nv#EN3*mc@bS_k9+VDwg-CfT z;oAX(m{L#Fk?_H+Vuyq#I~xNyB4J$5DAXtPtuWP`09s%tIXi%^-Mvhp_fUc&_=FqG zp^ah0B*NBQkzUsF*p4o_6II1EZ5e{h$ZlR#l&yMTlqtQ+Izk+V zkNjVZsU%(<#PdeJo_)$ZaSE6$^OFc~eOdXP(cEf=o6?ZuK|9FsrwzZ%q;4kNK$|pk zN?~sO($PBmlJDd~SL0RZ!ujrAaFAjBp-tmhJXQUg$i+*y9W`NANZl;4{; zf_%>@p*aogBIK~*jO)xf3tK@@L)5x($LX5l49ct3O9jRwdohw{TkQl2zpLF@! zuK9Ev6IraaIr0Cp;elG}5_l$Ipt3IaA@L+-@j_a%iP1sk; z!MPcP{AdpuMqHNI%38pUXNL8jGmCI+@vbn%vp)ep2h1*B2UIEN=((4qYKa# zIARqKxM-_+iI)}_}cErt8+sReLFMLv^sk!~j= z*-8}hNJ08Qo9~A@2Mw@5sWKx?4s8(8F9OA?9iiMi0%kW(Q^j_BTuAHT=nlk7oL_8l z*sB%fg^FwS5rFY!V0bedV$!p@C)oHpbD!i15S#pl^?n!ZYxf}o!}gX(r!XcJ&?(r? 
zKpWNiINxBl<-vRWW959}W!&+NP2%MPw6n|9@9k}*KWlJyt)zYb%|wKs3}2C0YyS$n zw5rWD`&3-Ku=2a`u>QXvW@{4b8`K5`OnD5M@w*ZSl|Lijt%E=JIm9{65(F;)Bhe&TI>!fyRmlf7UBbE z`=6Kf6`2AF6dzf}Xofv%o{&6ftU0#~6D`{JRfIekUia+2rC7kyquc{WH}am_m68W` z5)8XNGI**Ze~*6yT$@+Q8#a|56Y<%`cN?^C3fte>hbbsAD!a7|4TP#mApDHut7M8S4r4w#$VD)SkO6TYsicJ2>^3sW1369Sr>3QF&I{;=SXiYsW znaN~*QODcBgGHfs;?ELFY)92u&DN?-Rdw6oXMCLZ@&Qkpyz*~qmRoSGX*SNWmp0Da zFs9UR22l%=k$)WeK$)jXCiqR0zX6ZXzEStmC|*>G%_GG&c_D5m<PWNlX0?jGmx9TJRx0%ciMAh&biGyUIiNQb^1mPhOLlZykM&enu2 z@4Xdd6x`5vvyqJHX|POwA)omfb2pv<5>k3ijUOS)oQlc51)8TUD0h35`$a4O%0qf8 z2Qu}c(lLoQTkn(%CiB8RTx`dzV9mxSgpo|$+aLrc#xG}Z2-`i2U-!=u+-ZXb4C6|2 z^!1QLg?b}UV#6PiR59fq40=r0I4*9_Ajv5auJx4nJ?6v&Yo5ksT`ljO6K`WFsf(y#8y#i$P}>8HzxpHK3;GAB=wH4l zCct9CAPEdGQ#wmXhJ8TJv1ONU5(Z&=8g#J!t>o#PBk1{}*y2imrspXa?cbG#+E$^O z@_9Tc*o0`?_#@f9F}5;8`5Dre z7}Sz|Kz6i;zF%zXW#$J{WnXNoH@*ODgNpl{T5`4iNy!DF+DfA^HOsgKqB^bIUm^;B zvQjRyZ@48memv@qI_1o>Zz5vj!qBR6>r{P1BHu%iR_CYp)*zaDR=?F4WK7z~b|JyO*x5SJ=Oc$p`9RJW3 zuDhshOt~DTG*hdHM#zOc8)Izw@_7*<344}4f4I&06E-9PD8N`L$9v@c6`KK(kbKKN1RC8i_o03(t3OA};Tgy!5=Ba^~t>6^VW|!PXLSxHVTjl=<&nIxQc24+k;r%B% zjw@~M^0sK}hBFPG9<>06IIAK;zPAHpC~VA=46k^KRD^~J`0iT-_NM*=r=``3sJf6%%#Q3@sQji6n3KDTU3=gqXSOyf`o% z-oh4X`<1nX%Xud3B;wJ0=g4h_vduc~L61#;+L6vYWU}1dXIaBt^e|~>;2c47anf%4 zKZd$@OnR*RDY=o>@fUB7Pb34!U2;$q9+`uC0tDpEtdE4bO3vmE0}h$qfxcK)i5@{! 
z%&c_mc7^;)Zm6i z`2Xh>7<$ERUmBGuu8u#EBbwiJ#cAf13~tO&s$4m@OIK_W1E{aM+i{2H?yQRE3C{p= zNfG6A;tr81)Fl`*}PB1 zluAGvRr-<5qVeZnFTsE=5ccF`VuP3G5%1poBUTCmQ~8VczS(07fSdrdQ2v zHVnuaxk&2#aelu3h=gQgK|T@5R^a_E{zVBw!+Qx&!bl!UtwnhYy2|Fs%Ljnj3>*5vUwj8aLFwSGRmZTH*QT4eMl9T0&=nk2w+A*(gqy}h`?pG|yW0=e8)2p%R6ALqUY`Yr z`6veT%oj$0%ZYU56Z1uEQjUO7bOMI_T6&DWQ2RkiE4p+r8YpH7MSkt8NRcwEG$i0p zj3I1Zsn5FjsVeFEgKyhe9$dM?vg@zK_7v(_tHas*=iMne^O&L6JE}JC#;MN$9-RU= zTU1<4@M*KRa8Tj=dfXyqAkWJGS(|la5$>wh0X6mvOBya-DLydyF7DDfe)4JlG6>h2 zL7FB_HFFO2XUiVehT70Jo=-Mb|Lm4tG`T-i0Z6RjWB!FVkvoFsN6r-P2!H`sD)`c} zVV7Y3ZSAAax^MqEYOZS7qir)9LaOS7a_zmYg#NoxuX}YwjE&CWf;AQZKXrv zz7+VD!kn7f7`WmUCq4MENqL`qh+oXB-m)W!2fDP`f4vwDh9vV4oW&O`SjbXl28qS1Wh;Q{S zMK%^KMp5fs@oIGLhq+suoLjn3vd;?NtpF%AyA@iFq?U z(r?(8BmaAdbjXtG`^v5>zpc{1`!$E;8Yk=T{#Ff4>t8WB z(^4Bu_VnG*Z(1tbWaXf|KA-F- zF3BtZnrcVg=v(k=K&4O7oLdA5&1~oW-bdfVM$wxMA&7p!x;mF&0bF}*zRNot8^G7m zlF_Az3z4WveOyEJD)=9O9#j(c8x3@x((0ecrDzQ?T@~1sky9kV!Jl^=EjQRxLKc7T z_&@re3*ed>_ei_f{@=(0$9yT_is$neio*k}0ijL!6C-4ZQn#%(ts`Xy@Q#1YEdv!{ zDvPMb=Y#IXZmwjFtfXE7rew4iCo5j~&Gr64iC9}UR~A5mox>7flaw{3;1-tfdCwSe zg}fXM6WFPj`zk??GDPO3GYT^QCRHi@!QzwF-uYs0o7TYUX9*o>37`-n|Fvd3*x*HxCoctm(Rr5MGoB4|27oB9gh1Bl)kXwqP{R@WNGCC_uuxa<5-`Co$;vxEi~ z`0GL=tTvPH@3k;7VeQUw2{M<~kgBgQCKw9vnRLV6OHvJ;MLv&~o*?YFUj z6fJYq6eDHp(S+F*C__C%me<5YGJntPZ_SE-0i5-#xHIHQ7M~c^o!q7&#hK3&q%1bV zw>;wwxDKJH|UY0ibBz|?RIX61oc z>RY+Sbf85`BL2SL_C$)Mp~A9797)n}&pf-#gdoB9MV4Q?;Jw9*WD8*{2~x>hd;54bYwYT(TF(sWW`)cv zF9r9X?SXp8vC4bK}>sLww7VT>L*mrT`s z5J|A^tx^MP&!$dk^Flr26m=HX)<7}(B&^!pAG@zKY&EK&=sC-2&|PR_c#?By@WsW; zpe;H9qe?<9vgSyxOE=7zm(Ao$%L|M%mq*TROj-R~8VYtMSRBe+N~DNSkkB=N2^+Pn zNTD6ihtg_=BO=AeK*x!!ef6HS;p>xh|QR~HKWmx^=P)lyz52yzLf1`AWmPL}7<{oVl2&qq^` zrLA2bv-1KWpTyEoKFMT_xRYIKHY+yVbd{Qu5f!y>?6FhMknOToy!dD&e)oTd2youQ z_%!hdf6S}86MhNPGw=;(E}iJ_Cr=C#E$m<;N#UJ@rwc@6lvE2e_9cb_PZ0<#K!J2z z^u5E%Pq#ulpe?8Vr-ybd@3fWMHH5s&*Jh5In{^(AGs)X+eo=lys-ZO;M5K6*-p=l^ zX`U--K!!TKj{B@|JyfhRkt9l&^VNy$Or~Dq=c9G*bn#Ns@dhZgdFrvoV|vkizrI`f 
z+YeWx#=a_Shq%_|9`F_Xp?ZkYkFWsT>tq8;O_%;?yB&~Bkq2Gf+7Te7QPG@|>&eF; z(!)LHW-qSFLuQz_HcnN9)~mImw>Sps8EOOVe!QUwPvMPLo2a--7RH35gvR78z1lKDIBPn1Q%$ZI^Y zA}}-c=@c5O2#*A>JCpE*QeA4b*TpZRlE)`blEk$E?2d~bU)58lfBT`b16Yc-hTHA1 zRbTHo9IY9>Z#1R+MY>z@Sinq0rd3s;AG+Xw#7cAIoa?(qOu&h;`eQ`O8iU=InZ7(^ zO8frtyZXH4GV>e2#y~U(YGC=S;+5KeN36zRlU0peu&|$^u5xC-AT7|#dv$6E*+UBK z>I@M+>2>as_}}8`zD9-5eLP>_yUIcoID&`L zt2}J>YSG1_0Lz9-_b7th8~!y#F48S^&l6+sxoyouXOH`8uK{-B66wWv*P&;@vhUuD za2o{-g2ytgp%&JAI{P7(9s1+Cm`^hd_>AZ88t2x*{Ng)hw!M&V5dhbihH{R*eDvi@ zazcB=d_hq}lp)Brt(H;L+nf%{0zv<_>*nxKGodd;-3nA*hSP+$k}&uj-|p+@ZLqTg z^3w|g+6Ke`zp!x8JGl1u4I_ z9cf%!W}Uo`;n+NB1uC!R0RVh>T$xTkJQRe01$EOn2rR|ma?h;>Mqc!FfBjL*C`8RP zvK%ge3L9s#o4}V2vv=qmt?H^n0ltw9nRxBaQGylXbGo&H{A!y`wRJSOF!Ke^kJ5+7 z8Oj<}vD!YouTU%vo9ws~R10zPE~_J}k;aRuu^lBYmx6G^z(bhsAxYXXmUwA zGdmVb-CwY!9r|V3nflYJ5+S~XP-=R^3iL%HHRUFZ%!3bBS2K+AM76oZAta(uVrUPZ zqqxm92Ewq(9QZP3EutjLuY(FrX(U1>)Y+MeWcC9qZQ1!Cm)R2X-LR^u&Gz>CKrG(! zpw$Ig5ASbvN2dNm|L@@|%wKDXk4Wp{ok}#e?v_GfAIQs(?OxktZt-7JL^%(hL^jsx zy&OLpX{dJH>SQV4ooKzdCael_KCtr9rYzx3>(I$=s8)M<8sY2hhE3NnI{1(lGYi^K zc^;vLjR8(|Kg*cr1^#Tz5`{H;<$&*>V=8#cBE#VV{~i-xuTRdAWVEbqAH3-Xs*6X( z^F|FFDAe1bA1XJuaEZ6-9HlL6i?BY5 zL7H=D32Vf-)Ycv$_YwB7(QOy9K)$a;5);35Z@F>+M9VNWV_Pgwa1-g?Zv~*w|tVP*Hr{1?KRhPf8^78to^RIZVk zXf?Ttd+_Vg-Vm4=eYoY^@YbhG-p96e%p|noyg4~TGef)~pF4-kAkVnHv!()J5iW4X z%JhX|*wULe@7fyCLl}a@8QR7FV`mSYyX;PAFR#-s`#7PIq~0P+uM`=y@YzUp&#$!l zYS~GX0h*b)Dy)~A6SMxc{8Dd>=Ji6qjD1|;LZ;FmEQ?$ea&P$&oR{U-H9`~PXJuNV zq9LX(4bkFVJAGRqL*EmrkcZNNlZHDIipLB!QJ$9k>mR$7{)?7ElvxYr)XINF$)EcV z`sj_43}*$FNKdz!UZjphy27{5WWl%W7YI*}|B`e~b?(9^vF>e?Co=14)@J3O6{T#X zjsZRT!0TI~0k3OL(t;XiGLta_z0tll8X~CEb7`BafkEi_(_^Ge8O~zGP!9@Wr+zf3 zyb!c+J3lIbH>beLVT=lEs7$Ll>6X^r5>1n@=i#xPi$jtZsW)D2J4gfl#SI_Am|I0@ z6x1i;@r5#}lhD#bjYvuLDRtF9-e9<;q^hn=@oExVXwtBE=jf=gjyc=yFl4zexu9)G zv*7`Ux^pI6!`|psHWqBj_8)zsu8C4e4^&ipaxQSP&bE>a;xERnI@YIapJ0`_2Ph*O zC${Kk+3GjH=MQzhLLE*^Z+YE;3t9QltYXubc_*^d!8)W9E4WY>feydEyY$iP%1W6T zWQoNR&DtWp+N$64s?*#2%d4W2jiFymJe-fPh2MiNl}Mbx{g<88$x$x9C6`383Q)e^ 
z8z-w{J+|4q@3`lhYoZ!=#gzoz^c+il&n~IT{2RICQ+kkd?2t|J&~QAhw5t4(#)f~| zPOmImWhPrka!KVWg&vYX23V@so7P?Wm9@~hO&p8vs4Hsi?eWq0@p@$=@aA6s;`;&yXxKy9@1#WVnc>so^9U8 z({;K6hIda&PsvOViDJ48d2mbMSCiC2`Zs1`8)aDc+7FeK{yP1W1$P;K)U9_LWm`2` zIBwDoFx5)kUmG*`aT=a*r#<9S!R+@f)!yCqoKk0LPp!R`g-h1evOdOGM1Xq;sJzEh zYXibiQ=W9?vw$!3s_Q8|4I6p)&6r%IW0ubmhkZ$5Dr`)r8oYZv@>FkL~Bu-vX78JwBqgENzV(TV_Y71+F*?F4Q zxLKsLG`|1nOHlacYRu9Wj<^;gUsS@NbZ$ZW7XC#GO4)2}aaLhXX{?W&LCkvkN+Ks|rUVO^^CMCY+@%bq|A8V5OF@G#p4*@aR$hCV?8G=;zy52w1dddL+P7xYLMp(E1~0^n}M3PyrD*9R_F!gBL#;9KAK}%=fb2BZ3<0;NjiZj%RKe6tKAo|!%7p1vWs|2Q?2Kc|jwh$lPzr*NBP zeto+3^g&1U%I8kY)oIJV4rBPK3on1>HVlyY49zuL{R!A?LvZqEk^5yr7`T>|H4~|_ z;!o4t5W&Ai7Yfj8$YM&~aNyH**LLBer3BI)oOy@u>?zDB#K_$Fz}y9e3A0MhkFD)-BNjZ z>Q^|@!=I%(43e?lhzuAnFI+4A&;PA>+@I1Lbx-nBl(XH+H6NR9u=^R$j+d!kwk(xi zmo2M4>xgMBnvR-7Xb^7F4>G~moW)rJ69>freSULnz}1$i-7CFbRZd_`C0>JkBkA47 z?@TLg;b|8hZ<{Yk{wyt5pucryAY@lRJ{R>gU>=Z}16Ja~xc>}v{sD7c52ZesS@&IL zOh$%8k|^e#7NQ<-n+MA8L?u%u$Icrpo6V3Zy3yOzue1`*`N=KesYNi9jNGCmHjbN5 zOn-B}5H{3*t6ixr*w=gMt#$j$Pi>Ym#V=N}=z%Gs;E(Z;90}|4^S1ucw<26sb*+JQ zl)s3<>u(VBnr=OBXD^E>8y+aOSi~@qXa^`~VJP~1Zy0O~;pWijmk;h!qvqB%yk|YS zq%|EC3^ETJ@POf0UH=zg^2Wb;1f<05uyC-2mHB)OD35I!eqW!Kf`mW!`#^VO;1ylj z$c>4Bk_$;wwVNEiGU zk|_zfU6R_ptaxJ@9bdiWjB*v}pmqo5fMVPEYErVcezUm^%517WgH?`$2*Z(cMgDZH z8jsg!F|a}L6+zRob&Zr)&7r+B;~u1QkL&g)&GQlT{c)`qj_7OFdF|<*SvcQbNJSfX zt&0r`Kc$t{SzZz*d`VsL%l>K2^iy$?W`;;yggKg?UCA>Eki9-`Hfzl&w%8JqJe5Ls zvlt!GE9<6daL<)oA%{T+bl zk!^5b?K}<3zL45;x#!)7ngsj$)+&aM$`YT5e~7a^BZ-v#FGjaFS3>c(c{Onsg}!qO zZA8)4IU^?k^WK4F!~1n%cGhv2rPx z*f}8mun}ylf`UB$Wyz{wYhp8759~(A>zoZ|seCUNPn)t)_k6{k7=Sa!_^RKxI$FX% zb+vUvmNy!FQ$kix{4xgJ7~6Kam2>+gC>@3?b~~)JLiZDj4Ew+I(|C1G%b3q)9_^;|Yi1@J z=TJT(q>uup;U!h>R*r-P`~Bg=L$jR&BZ?m?8QfF~=ZVc<6V?B@QO7`l#ojoJq)gpk zJ~64iNhd@@ikv@=pN3x=@d!y)oQF|hZt{%{3QEI1G6CeUXKo~;>c0SSa!Ld-dY4fobT>B=eHHAH`@ z*;i!W(R^Bgcgc{e_w9n~S#!lHEhq7WSg8T-;*$$ZlTXJBAu>DE77)T%U>Wv-eqe!? 
z-Zh6i21dSn75PWx=scGEcVR_!WmCiYGG&*jwJifCj~Q9%ZnMy6VwineHeDl+L)EuR z8fJN%4fVOiIr7#SGT=WBoIXZkrBxgB5RXb?3E4MSdID&}CTBO4hTbye*Je;F0{1Yk zkQCBTVC4Fd;Ja`SP+aStxnq5sYf>x6>ur1$Iq&CW`$Xc85BdYOoV@^gh3#emGtrQ8IYT)V#ZN<=oNf|B`o`waE|Z*2-Ce&*g!4!oOXk*cgy2sfC5kP(H-1*(mYL%z0ijeqVW6 zNo{Lv79%CkO+lHf_@S~m1sqN$h8~`WR{zZaxCk8xy_FbbPkbC~{8beGF*+aB@Mo{{ z#*aT#w$A<*I)$ttv?B${)=SBw>bhLmL%g++b$1!L-E|B(hP-4Hb0sgpEjMH3@`avd z9DEpt}4P-8nF#Dz{Gv$O>4zYBy_ut8EszsXZ`QO_{? zCMSNVIGW5Ep;E?=xw^FyfMyS>>}>_U6aPmI^#h2#CV)7>)MUSG6Lv3$2Fe_jn{I0H zM&)FQ?Gf?UQIE&Z{BsAnI%ax^X-i-Xh3u-lNAo4#joN(S6tR3SWcvk+?;fK8-vU~O zww~3Ex_`%f--=E78XrKpEkeOvi7^;J?S4+r_P{>Pf@SVlx*Zid9eoEXw*053@J*E~ z`d#~r@$2%@+ZGCw2g?&25KRNii9=zNY^p9s!9*O=l%bu;?=HIfGd$Xi^o7UD? z;5mgwzCV1mS>Xrhe@k10{gp?G^5yNzaa^lOk$!NqhjqG*X8Uo&!}`Vw#Lt!YRi>HH zJPFr){8i*unb1A7zn~`Ko7C2}&jun13P|_AXZ6P$x0@^BQ!K$zm(6GO)y^atsZrLn z{JM}F*dyP{UaP!6D%le5vtbg31tGgFwcDfMJwc_q-`W~?{-ga_538+DXNjn85<8Cd zsmptE-*t0t1rupO#A$*)O3I+4b;^*Cs-Af4-WD%|Pq4n4?}g5|^y}tG`Tj}5U?fu3 zskNy{TY;*#+4;X;#YD-*YO)9h@wE~UvcwN)!gs58fEr_TN_tD z>yOo=meD%cl(?B2YaJeCn`6IxIJ|rJ_j3<_j*^TX_towzANaZ)WAa&4-Rb?}dQYB6 zCpq#J`*tNi=HKVEkzw=)+)4RM314)4iKEuy={1wn`6}1rNY+EoDE(}a+LE9Z7cYbZ z`su@nrgT5~0|nHrLUswnRr8*G(wVhbYNUQeT_~oS{v~hTn2sSXgfaFl0gnsdNG&%x zmeO3a*H-py8#&vD4rf_0%{xrC_#wm$z^W7ujUqJ@fZsZ5g5^~c zM93L4p6snc#bVv#jt^a-Pqr&k+oQdYCP2jmmsaGi(aLZ930k}|S|7l==5i=A=1RWe zot&1yeXW2~2xk!s`-ho%ZD!nt$T@QubC}pWsj;u?m&{*}NyK`}YCY>3r7Oe54YN`f zT2GT5lyqK6?V&u%agUCvWJ%ZbjN@$#2;SIhGhZGENRf+bSIs-m%pGe};ZnVmx5-@U z`lg;EFjIE0Aw!12uuvMs8+-ll&Zkv^+T!09zCVx>c~79x*Ux`{w$bk;emqo~DHSbG zU@{R)g2M+kZ$)S%ham>`O&&QmwcI4N&YkBTgUh_3`!*yIo>;z^T88^ zSR5j+aW}xW!5plQAIBC`oF&`e(IeiWjx%(T{PC*0*TMEwbWdT8_o3_(D)(y7^fKHo z+MfjM<@c4t{Da7xl#89Rxfst|mh2!S%h~R)TIGEwORC5PhB(sx1E7Q)b*At@Km&2G zhYb<@!L}r?mpcg^UT{32m-h`T-uL1vFQpJA>C@7N%>hT|x7{}1K-+`

    iFTz!Jg% zdh9wnlM+a>QINwPQzg$NkLS2DTe@#$oR!lYhVjVe_1*JOX&jN=K66pGop-e9>S;sW zFzb*e8~KD&a70s7M8e0~4pq0ZpM#11BcLRu&fPjK(;J`AP1?~3qvzT#FLbTWWVel} zP9Yt?h|mdEW(*$`29!|1*wP^)_5An(yM_wfnn=BmJ;ZKx`}9mCMD( zA8ea4)BO@#U$K5z2BLQRo)3E-GrLZk($WYO(?S+yE9ZslpAesz*84X`C0&+8v`TMe zakS-@FnOKb=@9}-U$xG~d~4h7J4Y>& zUji-H0K5mDc*o|bNIfpbg}o)D1MApow6$sKWsoz?t$C_imsSG0deg4FsYst@zXHv8 zf_lGWVyX^+pGaeWbaMApJH>1U48a?|i?kI)tecnFtHb|#<+EO~UKFq!O1R}j>qV<~ z;FJJGiu%&brSqK;suEyXLwVk4{~$?W0U%KTV4mFkhvo$SnyzXigA-|ur%*NJ{<==) zhccD|2f~J>IOd<)U+z3XfF#nZ!K=d#^{`Kizy&Afm{6-luq{}Pf#2N|1Ahl+tl+7E z#=pq7oeybTV0P2U)L~=gR)GX2`geBhs3NQU7vQg*W-GYBz8eSMG_urF-6d+$eF{Hq zF|hmwvm;S@N&P>}x7%vw@?&eO%gK>D_ZdeV`-eDs?0)e^LtUzw0crGT)D2p&Fq^^B zl<)a^kSn*~$a@YxCb+gt!gq-49&1yDoW&$NtcO$o{P&=X zaQTCRIy*`>ayrc%M$g}p{BqwZ-y}rq^S=@jmok%{A3dWqGHJ7rU5_4+{*TafhIWfO zcy`>D&e}1}+L=ZNK2ha@^XFRGder)7dThkvZf5ZMZerK4VZ+*Mm zV<0)kxK&8$?bq?HdK>qriw7m4RQ-Zg8MHMg-P==O_(kOM?+=wxjInPsciZOAALPlY zxbwx(KidY%nkURDGDN?yAJOV(mZ3mzeh4@~Rs)-Z2@`>HgX|Mmu7Fr5*pgyaNrxSH zJ{diOde(Jr*KdElP26_-ZQ@_Q-kekuDG6U8Xl1!up)`C&tWcBu`^9R^*&^S^+;<@@ zFHg=ygF#r)Rp*nI27k4We+R$1N$V-@w}R3y*Ufa+;b1l}rd#&8OK;zmTLd_VPR<9T zQ)35ynlkpB%&M*RmoM=Jwi$~&eHN9{!y3yiR7|gV2(!!?L7 z|In(G<-r@ygS0Qcr!DpX6feB|>Xc{bLiH5qXcuLHuGGH&Pl9FX^9wf0YOcu4P)O}3 zNdBbEQS+63WZ`A%JufYbpaG?p5T1D0=78i2;djMzCSAGB0HI@;locY=i&|!?CdxB) zw9Puz@L6zi2LS^S1|se*!{f;t2?4AjJJ;@_30j{iPPI{bAR8BHs)^jbnIva^5S?;k2)2pDw42LPWOtBOr!?(`fy_Y;(b-dnGJTCD-w5%cKT_Kg#~ z6h6`W+b;{!sfNdjH+bk-k!@?GTS6DRdr3)@-BZ?uPn+}17uuo_cNyC|P3EKTHG;df zM`@FN`0qE$9=q?(Vzfj0LPgr*93e-kGmddRr6Qa{CN&=3a6Im(e(6w4;s34%g7qH& ze=%AKJ+K;3F+bKo45)9^Q=4JXVBoM=wj@*$)F)jmOsOeBzd?AaW`HQGK;=@=>hQsy z(!O+#b$Q@p%9`fF5$Atl18We_y7xv}1y5z|^GI%75z8&{UVWoZ(RtpfR_%&~TmoG# zb-I}3x6N^j{_$M>i+d@PIGHhWmhr;N?Uk?9weTM*AQv3}wrN8Br8mBIkZk+t54cLTNA(1IlcaqW!y@(;a zBO#9Wo8HLd*w+n%LS;icylbj${OIX(gTu6>J=6~ zG+SDrx{mrrEYhVu(ZhblBT6?^mym3MSVI4#mZ4hq#N`~Oi9}(n?*#Z{X18!y_0Y?a zZqTAUhzE*{9`{<7=YT_grV>O7&nKU~mj%Y1^#re6W;{qTC~K}1&hD`(!fokaGnbuV zE_1C-GL`QBSa`S 
zA%z)?8(-R6B51WcemCV}7Uw)q>6dE>KP=Rb4+Izlx9XEEqQT(IbD z%6xkA!GP-UO!6@am*Vj+$rvQL;WtYx-6aVks2iBK#MPZbDe~V3o?3;3`l^nkeo_gM zsGua@!N6`jn_Quv`cx6mJx~|(OzvFc#8Z(G+v;dsq##)KH@+)edMitW1{DV9iQzv@ z2ps|w)IT6q8Ch@WX>KYiUjaA78 z&y744_40C6c*Bb;t+-^D`n2jz&e_)+7M2DUmhHYVzBPYq^1khY9J%~P$yY?MoUe_Y zn;KqX37P-HfmfNtc+qzl)(ap7K$ZnrSBS_-`~o#*RT{=uM9UBw#d!W_h7B-S1yk@o zq+Oa*TKT`@(n<+h0#PM-2Qbu~%hA&H&6y*qq%jIrLij_u%bidJ97`ZGSDhY5hWx&= z&?Dl(ctyB%IYwUYNG4M%BgRwWvk&!B{-%Vmes0?NL_CxwhYDq9M68D$Q^W##4z}Q* z8rjKw3hJzK|0;P{ZO{4lro^>}5!wce#Ssn?J8nP>LM?7m*iaa@H@a3e47K8w3i-=? z{}R^NQL2>?6H z{ErvB@CSy^YC31(N+~}U=7L}U-L<_CJ3QB5-WAhC`{!|3c}O|!e{C? zVr;lZ;Eu81Xp<}jl)AR?$-0U9%g@r)r9pS(CSXm>31=bOVjzlq)DM*_>T{*bbC_W< z^&`!HD^zf0*7>E} zzq#e8f+7EX!VG}%KrRb}vV;+8e+gyMSgz?auZ`D|7~0a}+Jb$RJD>b-wx3THsLv@I z6jg#*lyKovNk^UjOC6Z9;nfOoisakMHHod;`LI1!mgAO4v5i(YxWfR?AkGXF#X#89 zUpn4LV$P|b#ltS?E9o=o%6sgXv0SzLAA%eLocl_@ql-Pk(el3<1uh(xF*6xf)t*px ziLev7X=RmD^Z(HFsW2S5qVzQ2%F&JZ=!7ygK-}8C=9>2fxR}bFr^FHTwo-5>l z_zb!CW`O5BT)4GK!%AyHc7n~99n`%2(oeS>$-P@2+_^>3SZE4pT)9i{{X~E)-fHy- z#tLqOQ#?O<->I;!3)Uvo!9N~`rXV*`NY#O3ZrCm=_>f`LCu$$l+Dgi|B!KY&BAUnh z8SKGX-hqLlHPiEBO=Cg!6QT*~8_{3hd?kr9_)CuN$)9z8#RZ{Xr7mxlsD zf#k`bKr&jb5?o4Z5oZbE&6rct&YCRxt?X@MRVZl(3Mys50e{#TvNA?0$9{!jmuSQ} z##A3AC01{c!#Xopmp*Y+OOl}|?y@GLM?YPgPz+^?ueW>foNN^@e8$XF$ADNA#Y?XT zjX+%T`y`1i9BMnoCiRg;e>aF+37a$yUyP=bJSrkjotHYW1z`WO)iPeDOaY3<3t4{W zes4<~+ydr?j-k(wch($N)_wI4N^{XqaS=us7;KE4HAQyJzA>Vo!U&`X0Q%(=J0 z<0ZP(4(NqbJ%oCTuC@G$q)U$sX19bwE@DHLRHg+hFmU^dN4(ML>8krDI&ATOs?2Lm zs_qW(^)c|g*-!XH(>mR#_r+y74Ni}uniM07f?LRcHFXIa{YkI#^IMOp*v8Bl^d|!c z1)OcPMA;=k&p@6FC-AX(mvJHCg%$8SmFzJY^Jw$|2f+<4$z6~)lLSAoFcv+&*zkrv zY?vg|6D?fF3Y)3mHTVRLc-t6KX}tlt+rG&Qnqw*5TN6pUVP@nvSO5r6!!^dw6h z(Yki{T+c6GhS+T$?xD#2JblM>Y4NibCsOH5ep6~=av1RMOl?Et1oDSke}z@Q^#7s7 z5wcBoKP|a|9oFTOip{r~n#*q7FP2FkPR1Fx!j=4nKDdk-3w_3n_9wr*V54LnY=+f= zV?aXga8fgTZ>a}QF)<7ZAtvoZ6RmqUyMhE6sOb7H%2LLlnYjB&>XX!1_c@U^hCZnx zkR@IUK)<*7DEJtS1uZKztK-=FUZ&^G9-SMt%)se=*q;;H0x1d~3xJ7iqPx#bcZd4s 
zd^S6^xF=F9jO_v8-uHO59E;W*o0QU)5%uV7hc2xdZ+?mBP$C$2iplW7Pvd9Nc;*!%b!yrM2*e>rH&{$*qN+|4%6ej88;xTbnMu)EP(UI0Drhxu`! z)JU1MSMrTv0pb`_J`Z7t(=X4h4_q!9b8>S83mkf6dkakHRV-qc5ND9H4XpDCt(enE z_Nbt;d+~a8>lG_gnnfMv`=@as{W3Rr?#02Jd#|NLd<%66Br}q_f)CDU@UIPQ>_1l= z=zUguFn!xO!lkUHhPISx-uPUruX%%2PFO7hog)BB9$IZE6G2bJct{e!lU8=KiS%ar zlA&ev(5=`)l>Nf}rLP=D?5z^Wn`22+j43Fa&nHdRMYpdVez;%bEKDUa#5DJ) zoVb2Acw{jt$lcY}7eIcmT~2&hE)Hy zA2Wv)?H#*ASGL(fG9m2{B^h)1G2B~R+L&06La=2V4}Ua54}EM4S2yFOd<*5*5%P^+ zn8c9e-A&XXB==+&q|?_5H8XheJ(N$tru(BBc6R0K2sFZHt;Ggj=j6 z+bO%8kb_(tfRatxA@1XkNL~JDbMnourVIv3vn3~d&yII@?2POA<8JEZGsf|_(TB{` zKH5e>$P7Vs8hOF#_Af@uY30MkU}hR^+=johIWFBhC-V0@kGG+J_D7!yB=AgmmKoll zpq0>E=7jxnni@bj=-O>aQ4)l_{}TnG6H;(Jh>-qE?N~@zq>&`Rg%goQ6i3ZU2vdyp zC82|9p$j?-@%uF4o76lQrB29w=mE>cACLB%x;ecDIkjPNJ4W-c=j2i0LVjzVcf@SQ z#5?97uQ56QOqGYjnXRdT7+`1q>K)!1epQ;JucM!2Wg&EUd_P*oOb`H zTL4(qk_iWv+q1?_H`)`-Vj9Uz49Xq4T^NtFu zUu2s7FHKP!yE?T4{R#&F^&RXevE}@RbtpIE??EhkfYXqkGFJPOBr1F@$04N)Y)buE zZglZK@aH}p?}CX#S5%Vm+Z^=XrBW#S)wn}pjGiPG93>G(($|JTM^K=u4b~I&->%kL6KEEQ9#evUUsiN@Wt~~Zj#u5B#aPhR*~I*C zXZy-R&i13!%lhB^Xn)-RBa4trrxz1UTZ1iEnig2&a*VFE% z^Nnwxmj5!kN>sfT(aQ;ww!=>GY8mAUCbsYG`+pxp8@rTH|{CY71m~e9` z7sSU=v^L07yq+NZ*NO1#$41?9EUNffl`4qngS#654e3d}5lN!Pm!+H>+%$ihrAg>7 z#lB5R2!AOvlkkk#A8xIpc1} zU2Bn8{ar1(IzT>SHQbI}JOEUXkeDa5xf@K)enkAn^&Y|jL0k)}?BF-So?e_kD)zF~OGjoVIj!;y6uXUcDhGc~y*PDg@6fo}T%5MA+8}nwlK|TaKNq zF!^6s#YhO6OK^@NZRhR$QPuG8CacyqRLlht`D?9=qt8ACzh;lChb*;UNU_Zn!*5Ye zfbBvLrj2s%46yzzta-y)IyA&sCjK=f`ft}bRmk<3%C9SPqCSN?az; z*&+j{Dl;{<+E*}NaU{LXt!6W0{hMEy0b!?pAt5_iMPe^*G z#J&OW)A8QYr%~$@lg167C^eP-{bq>T_ICnDbh2zuU|q?jI(u2)@mxL_}hVcW`0oJf$<5s zl8ia05Dff7M3Jtz&A%Xh1#JlX0n+B%2w+J|l*hN*&W(rrFARjTLU1 z*WB5yzzbW5?*g(sn?}*$kIRjW|Tk0Bi=K8R zCXMK}qgL?21_M$fJ~bA8A0AIJ8BlNTcaXLW$;0pW^MkcD)F8j%>p5Rfgpruvx?1G4 zn8t~Dc6%kI>CA~qpR^)RZVkeo#mAN~(?`3&HyPwllyghMH&MAs z!)ZTgd-m|^Z$IA0@MytS>UC>4G2@xeOUUTawNBMp`IR*RbfYcGTq~)C+)#aVAmbA$ zy5vv)r32?n3v$(yh<2Mj^6eg4v;0#tzR`XPWA?rpnjfuiZ#yA*zc&t5+t^Pd3OhgX 
z#O@P9-m8A|u)?WZhotQ7{>bT`U23vKQa_lN?7QQOwlvp$YT_Lc6jLD!FP-3x+YQb2 z-}MamecgMuCn}@|)1=R6iVn_;XQP5XHta(bYeYMPx`u2H+9FDQ#q{P&KSyNY8N-`3 z%FF;hX6f?BwOUqDZqP^f7TB$y9q6Xp{OQ?8(p*ce!i4rCS|J*_y&K-oF$ro2BGh)C z+_^r=8^89_INfB_>}~{Tv=Q0M)2e3MWz|BH!@(_5`f&+w_q6rKTnV8XNR50{KBO&g z_WCc)(vc#F3@$Ne)O|Gx;y-15euwmFFnn-ac*`#eJ*y}A_fBZz)YRq{tvZVM#83@s zpR^J@ae4oZkQbJb<`1#%A^nq-Gv$N29{M@S8(-PPnC*7BYsl0h@`o5iCBPTrOW!?s z@wnYAG<7(X@dW7OSSi}m;yuUZ?Q!1V`1Q(5NY`i@^Un(iqeu`vD0+f{xw=bNYW~Y& zm&31nJtOUFCr0UW??n_57uyQ7_zt6LJx{m3cWbc4wW?9GhmvubZRpzRl%osAqqhIn z)IL2Nz_ogS!`cztv1Z~VZbWiGb<$W;@j|Mvc`UAO(LjJvT0vex!Pb_u?%uqtD~_=Z z!6keTR|_53BC+Ah6`A8phaP{KL*=+%j%D^d(6L`{<69-R%-oh%CZhb23R54cXebZ* z0;V(_qF=gFG!tx^76bG>sQi);REh1;F}JbB4rdyN?40MSl-SK~HPO7U{w&OduJ+u7 zW@5Aug~^y73z4EF=z0^e*7VwL!P?DF_y5}!&HhXORZqx@1UPZLkgfvkOet}gHR<_T z7;$`K*CW2{5Sc1-4i%z!kZmGoK4Xk4e;S!t+U{+Tgs#t2R`UyixF2|Ta~$GgH1MAFfI&WdZS+31D_SSl&|;eg zGhoDo6r$gc-OA`>M-*I?Vw!}Wb>1mAtZF`y2T!J*B*<(_B(zOd;q)MM->BQ-jpalH zZSC$XOAXx^lC>Cab99}y-Dmx4D{fp)e_(Ts({EAXCICI0I$`VI)XwTwyd7<^TC58y zneZjqK*ca3(fYuX4oSLItT6wVTMFevEgsp*#nmfh#HT4;u3R~%q zY?d=Ip&{JgZc0!J%_?gvuZ79+zTTO_b;SCbG!&WYouf&Zu1u_t_s50l-O>+WdX{&f zeGYw_8VWfVfhyLPkN9+T?45X|dGgS`dgCMa$TxECWj1V9D3v&X*4;)Fp+uACJo@1E zTCs`5-tp=`v=%QO6wn-lvEBuLtVhnn&0O!iZrWKyK#A^|YkjW1jHs7i6Z_7hEgFNP zADT&(W~%P>=YJ3QD$88NxiAZ2dM9yNZ?Pzv+mg(v* zZ?7zUNI&}Dt`$hl=%@t7o}%-|oMt!P(Nn)~JVto*-G*X2@ZoVS-<|#FBQlc^ zHHqerg#4wYF{rz%-;oH~Y*Jt-&QvV)sps@R!TGbPxK}%{!$Uu0NZo!Gn`ja+w1E^m zq*t@*MfaMBv^$Hv{YzKJ?hOvE5bsJwr8l;6v|bdMKa!VqI1uz;xxc2th5l%eFp1F@ zJRQ1PE&6gW@qXo2jud&FCtzDVmmR`DKwNO46`*t$Mqj?K;Jo|;aya4UV}|8})0 za=S{qwj5(J1OD69TSn=W6+Z(NL*fM2`KBNy@r79HT?f(kwWD=JWQm_M769t7oTyNS zIjh3d!e3-;9{!QLp2xp#6E(_q_uiBw(dO!o?iBkXmx2u2&Kk@MYZP<{x3NybO61CE zr>1a$!58cS@eLMYcdnX>UBb$2cEH7kFt+A#Xr|cAVGtuLI63N`=HpFTI!xb+zVTr1 zgaG$5{JLn1R8o-iSAU6vZSTrkoC&rUyy?%=UP{Vasy?t;hOR|c!3t5;=O?Z{!5#h3 z79l%Y+fBCVu3YdeA2<#8hC53vb!{!p#K9A*E2CZhZf>V)(C!dD;u)$-o|Sw;n*?zn z-C7|rW60-uGc%}DS}f(nQ-|D>4um&GXP6strDrZ4AvwdHGzel9<*1WY3J`M`6N2( 
zU4DWkJ-MUYeuZwr87qjAI{I_FW~NkqDGJr}wB7VAa7c~U3)Q7LK10+iT(4tkQ9CII z*-AOK@O^&3>X@!w=6bZ5$R}HWs(k2}wmaHMGYEZ>VwLQ7@89*T;VV8SD|b=}(VZOjM_>P_lj4XZNLSise2?Pq0bV7)R_EV{=Wpmp-JJ8(Dp)N!mUz z)$eTlu#B(A2RXCphx&rYX7cVCJgasyDT#N6eKXUVo&EHj1;j+j)ZDB8AH;Qsd5oUb z86HADi5bIC%(~Gb2vv+o1Fov`iu?FAsaR)<;^EB}_(vu_OA-KDRwI7phSjw^|HxHP zMGQPtSM9`W8AhRf3};e`U4E&EXh1zE6O`Q!aMyGRmt+j+@P~Sho*24^r}gAwWkiFN>VBG0acFiKD`S}{95JF*}>Hvk(HgbBBWjIA}1Ax9oXWRl|I>&%&j zzUjZuPqOeo&qm9q3hoLW)+XqfQO=4TLYKMzWImC-|gZ z@@}rK0XKTL3ykS%*ZmM@A}j!P@Q#<`TEU;EqvEQCT|xXRE#i{_TQZW3atj;8?4FjD zpZ(Mmb9;Kc^YuGPnrg98n`e31@B@3rW8EPr%bh6CXcyZIO2Q_9aDHkR94{W1c2blF z#KU2k{G_hp>=2V}jQ*n8tEv9Cq;kIQQ_88AM%K^ja6zYj2r9p8D{H<#`Od?u9eg#d zL8{l(&tn3sEf0a_d!No|?2q>q?TEJFsAr!eWO8IUJ8Y^TtrSq}(SHwI!ibl-U3*M2d*!IZntu4)caV_`W;1A6>h;)b4 zo#KQAXB;2s$nSgBg01$H7RR^g7SDLar|d{tDLOpaBPc@Ku()^;-(@kAd7RKBWwv^eaM)oNs#ISeY3 z1T1NBWqI*~&Vqa6w_gX+BYpiX)QerBsV}4&oKf3MyieN6N<-C4Rd~x?=E+zXf}*v) zwnY)VC=`@g?EACn&7ot88I)VfEo{xYj^7SPg|n;CDZKNmUDx{W46NnHYgwOcY{0Yd zfLq!{KW?;hnN z{iN*gUpySP(w)*gD8*?r6)39*x}yFzPfVqzw4d(^JMEEH_C6K1w6OMhCnhvm_wbF+ z7reo?haz7C0i|u%1g<)Y`>J#EvBmAeK%sijG>~*7bSiv+pkyBZJy`}1(VW7+#oQLc znMEg$*!ZfRxO5ou0C(jp{lu&^2M3DC#G2f14F!$)e^u}TsrTz-@gw3CI3Y?DeW{;W z-dm8aFXF83ai~Rjs}Ft0Pm<)}tQ@fbyX)5Qa!#!B&~Z;yj- zKrD&onwCv&bSGcO9>kL72r+2?IC4e}Tz8HDPTp6Jysa};MKjbnS`kz}9>CQG7N{fnT#Dx+`2-RfbExhqCBc@rwRgO<~(@81Nm(4Bv&Y(k~)#`Y90 zsvp63fM=(e3$?I*c3!=+p6eHI!k(9*~0*oP~-llRATIzr}4|KH>QF7+OMyQR6SnQ;*N+?8T`! 
zwoXr!2d-bU896ySU>DKA_os()12woyiwe@pr^PX9{xc3k&d9B`Ao7;+|Mi5aQych2THfPafMi2%O5g zX4YhU&0dP#ZS%nBcG$vloC_QK&>{K9O7m5{RiFW$%y>W6HZLxH*{bl9+_mInun*zi zuwi6}3(RgaTC;r=7w;IL?Zbmre>{@I3K^_Euc^v=mGTE%4}P(QXBeUi$qKIdJ+;;I z6c6TtpFJ07>l6wSzMTTm2`@{eq&H0Nj^n*V@nrsfcSXgx+F?{S zGsJ&?*?+qxJIqJtu!x{WeQT(4dC+ZGynojnnf9n%deKo) zEKbTR_|}olQ4zKFU4DhJ!&vVZ1!2E5V@!DDsEjzw z?$OK4F4^w-0i5kSzS_U&T}ypHcWJ%widFlvZu|_Ba;V+gX}6Q53GNu3TUSzf2B#K1 z<(Sl(k5mZjuQ{SiM5A46e|TA#`=Il~!yFBTHFF11mx~J|&i~Df(PFQ}NIHP5)m+{d z>D@PW&Uh*Iy2JPz$PJ$vL;b0o?@#r`2`0ZW)?Gx8uwSC2$7AdQtiE7n4zodESB``r zvXu+pN5;qEa>8WPo}0%C^m#5Ub}Kdc=tNHi3;F9}!`!!x+~&oFSGeeK7R6+3HKx+~ zd-zMts*3h;1bKjzZz{5As(;lJy?)U!eY;$M_Jqf~mX?2!o_F7`w`SK*```i_@JUkaNoFY9?j8zh=@T?N4 z{@R|~e+Zt;(DyH%pWMyedKpLo>6pus~}XIt|$(i52SiGx7!r%{BMN;oz^hFi z+W-6G{WW)Hj>{00hAnt0w?x%u7dXzMsUNPflz|w4f3E9GN)e7smHE+$Y z762agFlRbPV13gR%9a1e;ly1Z83BBODEiC%4hyNq%S6ZvqR-O=0queub(b{ex|}-A zGnfKpX=ucMVZo|nzj#)dBolw-{bJf$fjF3Ff1~{}bK%nA6sSUCWa_N&;#p1=L zBV7uKkJ>x~Yg;D~+3%BpToFDWFw9aa04p8|1h%!&fkTXkt+p7tX|1>xZPGb}*;s%h zd8F<0#;@m-q*(~Q)ytb9^D&a6JEhTM*zdJ-(Z*mL82bNO*VM>OC`r(N=HI*IwCXWW zO_OWmNrUM?ZX5f@`{Mp>!bKq{lXi(+SQ9A!ztGHg66{5AS(T;*Xh1WC5PB^Z-2Sd9 z{2moGD}9T-B((e2t8)`@a&AWlk7<;y3IRyu9-ptE8kuem)ROPE|J(J;>$=G-b+UK2 zdbDp-GpLPUNRGVyw4`Nh0h{}0E}~zU*m%R#w`=!MJ`h>KHz_o!U zH0%NEosWHfpin@+Rl9(l3Hdx7ni{)=Y)cW#v$p~Vv3`)zXeRY>K<-TJDsX?$L)vdK2{hV9&_V1Cvcu25K0IVd*&E_TL+mqqYaqKIp z>L#g;TjzAmb!)7wX|rB3mDFn2(z_K5WPWElqO=U$N8zV@cBEAHs~u0!Kd&{E92qkP zyNAfKbwkcNzJIrYW`JAz(!k)K`m3;MF*fa?bKBcju3psZVIXKshNpUT)m?pLTZ);A z|GC7?atvM4(r?07^%1l2u>%7$r`p4o{@@2IrCH`%Jo1I-g5F8-Z3%gm(#_2N8nZ^0 zMXp;xUy#@{Jc{p}I#~+jBr;>fJ=6&7^oL-&*SoXiOdA?-l7qzvKZJ~ZcyE!_BSb%h zkt58v#Yf>Y3ok+~%=lxMHr`yjk#V9c?crGMj+F29#-IaA0!(m0<-cnk!f2@) zJcHxdlg7jfeAZX$^Hu-Kvzsz2q)dz9{}i1mrDC@)BTfvPz`Hp8n~T_A*5_&T$=^Cy}RlSvsvm2 zXtGdTQ*kG)d2f~WpKj-_VEkhs-@pcBLHFB=_BBk|bKlY};%0nj&!Mk%1QQc;-fIXv z#l2eWON*R1+plJcy1O*5O;#NI*ZZtpOXGK8>HD&|(oIeo!)jH0JNItnkb1f|MW 
z>;S(4O{l#d!uaO`2AK+1f=|B_!ee!U46zA3wV)fd4I=nf0(?X~jiY|s;LlKXq>GQnF@Qc5B7vrHO+!$`)4eI^=LNOF4~Cmk zXEi>~4?Ggd27VTX_7h5@=ui39S~9a;vzLK}SuUDQq#}Yy)iI?^!Tw;IQ?gQUUKXG@ zUq3J5U}>E55q9VrRl-ixfb7T)Ym+WOqev^Zzjyh1-ao6RxqRP)JN`l^QLQO(%56-K z%hqd|Gl6?0=ZjZeiK`Fu8(LE7MhJ%&&EGt~+I;Em5`O#teyi!DQtJ*F84M5<%>m%e6( zYAayR@m*Xi>#_=?F9j;3TO)KqeX1CMQNlzIzav!rZE4Iah9;3X;G=NO8o5D(_wV4- z$m=^qpf0rl24clrW9qE`7}5R1?|EaJJF0cxV9-Or$>N~5yC@~LV_TE3Ax^pWr7r1P zkQtJH9s~e;(Xh6cA1Ov)x7h@aKmu!8Y9V{DPUYkLFI=Veg0G6TIQvBUrh|juVVO@e zzTX|be$T_@^{-R5z9**6L?GTb`f;?xD~~8}^V0OWI&6R$wSJ|d71h!vd!tFDkRXoy z5+QgcwYv6&p&_y-K#&*UT?=TGv^wPkVSQ{jyH%w#rz=q$ra9{Ns?P?4RIRvt+Ph!P)e>Jb!}?LKTdCVnH`bXF9w1ciJ!`I za-k4Q7zmX{4YkcuTtPEB$F;Pz(pqyZngvC4m5V89nFG z)ro6C%KwB(b-}=}1JAcnBdz|otH(kjU=xcL&X}cs+mc{~g9}$XJFNV7)C0^=SYP_N z#mjQdvSqM5%_YWE*#$Ok?epXw9yEn~xj^93ax|cAwW3hfxkN3XRMus3?+Llt5W%#) zRUFy0Sde|Iyf4f}x!MBZSPxF}6|-S%{>0SzT|ZobefSYh4#8DR^1*nl3!i@N3~7P> z2pm_SFtQu|()^Eh_RU^bY-`)5I3(}v=N~?Byoy!T^UUX|dwFZLz(ZV>*bvvNt;4gt zx`eYjm(qwaw%%_fZjBmw!WtE;7eV3PO)Hyq*PJZnf35@)O$A9>>i19JUX4&p!fx(M zV+Kk46Wb#GSB+Op2Jx?PU^QY7W7@D>#MB{pLRIno2lKs#N@Z=Jy3EU{PWqjv{x<9z zI^jyk*FQo!%metx;onJLAhGUp0x#zC2QdckFBV*`d>tIP2Dw_38 zuJ>wdFK@bBh_*26z;t^P$B4r#+J7juC8$n^jLQY1gI8BT`i8URU#6eU)yYw;jg{dL z>{2u2GAiQ)Jt_ zIhN~*81L@GW@jfSvpN4@p5UT98#c@z8a`<THhK?qV8(+ktIQMr;~b<_Lx zCc8oeM__HqKCtPd;=H)^hLM7ZC!ej}L`Uib5|$Zc>5?0=ctPh-$C8ihrL?)K|8~tu zPourD13CmJhQ9`NXZSO3KIsFh*vc&KS>DRQWQcnXo4T{vj=A%>BD*8?g@?on3?<@e z2RkP9c}7ixZ~y8>If>EVpE9=`X}YlzZFdgYeKz15d%=`0lN@#&+-?gW=A+4oI!1?urJ~uGmbm6mo zjWx+-^y;y=xm|4K&oYFIOZ2GqeQrT=WHnm3*h~s73w5(k{YX>K^Nz@gyh*qYx%Ab< zLVs$Ke71jgGAW_$W>%P8b7x23lx4a|yb=xkPp9$Iyy4`NXP!-!ZwtCziKAFZGZm~MvfnyJ_`>BD^s%WjTl02N&9j_#P$meL*(o#RQzmsf?(Wp)M27;f* zcSS+wHsNI`l6}=b9ngOh@Prh23iY}dZUo+0FS-8L$7r*jMQveF!w#Ws+|EV8IWNwS zw0%H`MIN`lPRYL96u&;C+LGgTQJnIYrDoCSaC-)EfJ4y%=8LMTO2`;|vljd~gf5Q7 zo-@{@3%yivx$#&zK@nIZhbB1@2PbHz(2f5Uoal+@bg__8-BSPTcS`BajM~jW9?->A zF*h4tR|@-r7VAEglN92xx9fLsKL2`s9CWI2au}XBJdfQxQd*#>Fab;6P`i1$G-m9= 
z!ab2URouuIL3cPph88@iMacM$H_X1sQ%{go_-~=YldwDCK!wh3LS6o5c`U^h3zBkY zaOx%{@VaPIH^0nf-I~(4wmTu&p5=8U$)HKn*K+s(-vW-HQ-{bt?db>GBDaR0Y}AV! zcGM}9VWU;&*7DJ@7x^3^HV2!~6f-rm55!PRcGFkEGw)|`NtY7JhZ7w?Hs)JhHHruu z4e;ZS1#%w2HsXUp!|$4q>vOr+HnM}YF;98HO?W!ZqdxOW1X9eli6uf$n=w3Qdrg}fGkGRw)>QQ8(Nz1R)7(; z5)PPO+~2dSVx#R-DnnLQ_T3kW1zn*}jN}#M&WxeWTyDH5rF}D@yI(YCS3sp7H?9er}`l~e{q&Eu=r-?*Nd zf74`+@qU&r1M4KI+Ejy%+D5de#Ex+rE#_~W>qNP>oX>xA&&g*XR9u918XX1QddPiX zFb!|>aHEc37Zict@~=_f4lil+Py`NkAjViw`wm!S4g$By)S(eN@O2(d&j3fxeJ4=J zrc$CB&N9bEhq&3Mc|z9SDJ`0@`&GSos*DJa2VTl**z5cifiT(uAiROB^#{#(xF&7p z)3*TGnDe^(UZg83B%p^hA-SQ?V5%pW}m*KJW{#mhgnt;>Fe|5llZ-*d(C-_wL`e3 zz1y@}XDMKYf}*c@Oa7tgW*3qd;1z$U&-%r}Q;wVRw~WN8&iDI~AGUl!MPOug-SM~P zqA=-)s3+1Qv?BS|Owd6x?3@0$OHvD~QZ@B?h_dH5z?rl-w0XdY8J>A(5rhAFylC^F zRA_`NoYtzrAM2nJzZQELT-QbhI4mh-3~}q+|J$`n#Rh$q7GbBejusS^NWa2lK2h#} zh;7++5C#&$OhtisB|W&SkVP_gu;Yogc7ssOEtRe=!htb?L0g9GJTD?4;4(h%5#nND z@E+3F#VbM?;H@IR|rS^}m@AIT0%9CsrK&@AF}6RCU;&&ggp#8UTgg_Bphf>Gk@wz`?%TGWG^ z0UhM?r#*vTil*#?>}Jl~b0iau+Ww3~dbs0GQ~ukvcG@$Xks;qO5gv7X$6o-hq(6-d z7~HJ+r5nA|Ni$aQOaO4n`yR;?J*2B)XKuWPkhVnVx>unKFVsKO zf6H3DO;e&j1izi`(zrrW%(K(7Pn80dR|dQQMDzU~S#!o1w7RFVTw|s3kbM_JV@wrm zT9#{hIK4f4LI%8EF-SoS6CX}o@;A^m2)4m<&=R{346%;40_Edssn3fp?Sz(V!VS-3 zFOnK;N+I7rAr6+`B2?1#OtK6QlPlibP1hh&>dfua$B?Ds5;|McnrAhFMQfMn*{lI2 zL9~eBq=3twNqQd0&faeNAJP3?&+51mEaz@G=W`n-M+}hLr*>Dp0>iPf7^Cc)&%klj ztUdGSu$*PchoxQneR2-#OBo(JR2Cwha7ZdYl?!&qeT>b$m0O@_N#q^3uz8d!lfzC< z$;S+~w;RKo87fqB{R%h*w7#1UHs|&cQ%*YyO{J8 zsfzw3Nu%nJf$MoJYe15~NXFxbY4iT9`k39&)Ixv7HGfWQXFb~nS;Zn- zb_hYOgPsE8Q$386|I3{6=O^1MSnbm!jn3Jp?)X>mU$y=j=GT+2AlgP7GN?AuO=9|h znwEX9_&Qli;0Q2 zTH&5NLvsKSHZdOX&86xzqVuMjF3T!0%=N`R>2N$`M_?|M5ewx^Nw{_tHuLXd2a%@2 z(aqcCO9v8bpz=gS$p{gxlT|%fZB-q2YaCsrWp>o)f!(L(DT-!bmT@hcpW8-aO5hpH zu~&lmY-O;g6__l6vz5*mcPU!t8Z+2mVXhn*|Q6IHJO z5(4B|UD(m2el!HP>61x0UOYZP7orB6P9Awl{>MOXiNjx%U1F&7^OO^%n*RQo_s-$B zW#A048L%#X|GhF+Z*t7|&FJ(K29-VR&@->Z_$55Z`ZhMgrv^+-@JnYgzBkj4)EdD9 zT%=EiQZ7>(T)L10n@1pot 
zPlK~wEs~p8qwD=_$hXQWei6J0YjD2cTO^*N{mM#iUw0B6AqD7UFRbJbuHLJoqbIMvc=yUPgZdkUNWC;Bvsspfke_T( zieHd!_z0zS=G&jyL>4V-Jr~jpJ+ww9Y1w8c2m6R2A<__Bju20nhATIAnMW`Pn;QPGtW5SA|V=nE!Ozt zSO~(xY_$73B%{zLqaTvIjv*(zPEO{%O1WwADN>VL=IR@f^)@R==RT$h%DCWd{Wh-rrj&GsM_r zcMbldNCT~yz4T7{de$}aDeblp|6>xWmGmO$7R8_R-kfx)cA$!+*?AcFW_S$vjD=Qp zQ|yh#_1S-~AxxnJHjS^W(1*g9d_Fw!7jTYUO!1h)TcYShquMgpq(=Cr3aVoGAY5I7 zssF5O+M|`KoYh%ifvwc4m2OVF=0|HpPa*4QdOaWQzZHSU!OV4OoT;j5XpES8H@8=z zwGXpyOSFhaqs+aiweJ44l{K%2hOLgyhO35kEbU;0OaC@BzHW`-J^%3EE`NNaG5`Z$ zgA4)o*2^rDyFvP$QY;DaKo~t#pYPn)+&7yq%2Fw|s71pejhC+Xpk$S>F$yXA_vRErfQInY}?6E z;ngYYu-+yphq9peib)2#i&m4YI#-M66Fif2f>zDgm`x1`0)28dP+R*s?sPt#Ih^8d zvMIjc^96R!Is(hId7u<%u;qF;Dnc4FJ5Ry$w5ZQ!k0{>Q={uCNu@kkQ$0w!2%moEg z*SR*MEob$n77dbIFzgevv5!L@>-SSF{5RcY!}Z^+IymEY8@VV#i#@{!k#4Dd=NqF( zSB`J0E4G<>7P6Rew-;*pnKP+2M*b5IYHYTUAv+%Yxo*t_*KsZhc}ZGDor;xx+U8}2QeC(B4CvnAg{#y zB=9a1B(zc#sTEC)arcH|=*xA#(MD&GG1EUR^%zW2YeKVzUGoz3Lut2LzHgb{J1)sn zh)~r1*j*}5dEsnP`e->OMeatM7eM+;<_0nb7C`cI0l=F+VLvhT>PTDvOU!eImIz_F z3UP7ddSyMCweK?jE8q$9JNzehfJf`+;I43nH)+)uBRnELM4r!}He^Z{!o{X^+nmh~ zCG%8=ym%$Mb9k)Ih|(gQ9-YXcDT(? 
zLRjXGM%WH9==D}ND9_}LFLQ9YdT3ZZH01#Ucs|?h@y7&_@5cVJ8Vj2}BH+aKzddmW zA1yQXj$}OOnQSr~IgR-ZjipuJb^r5j`4xHMPiXxfQyO)n&DUDD#h#SznIG;TlPK&_ zUZFMyKyTGVRY$)cvU}X-&=Hb4zcu2nyMziTX&Uj!!q~tvhG@F^ZG{sru6yC`9o?q@B6Yi3?85)W*RR=}j&m%i_E6V`gWyzO-wx_m)}-kjnOk`A+3i zFHK`d5v`a0W|VAZ`(`i06syg)3RRv+-T}9k>P(gD2pJNSSFIkagV{NL|p=DybKE`xFV-R_9s{;gEcm z38#a_d2Gh2%|~Y+d2x8S#rpvD(4{TZZ>l(p(0*LbVbr3ZN>D=9$`Dau{1K9DDl&n; zpsVBSNgF9o$bQWOJq91^w=}gr-|nu+B!fy;hnBO}ZN{rJ-VtG`S1(Kem!p<|m{j_P zJu;-J826cFfLw9-mepLq!y#-QIoOKsm!c7M*n8i;X5CBiSJ!?JApI7o4_a^4DTIC* z1wn=AmRLiCT|u*&&QI!QyYZ|NmOJ45!;6T8t1BGjcgXS@Xkl%SeNgKqJ^8Raiv?d_ z4K`8R_^PaYVtWZmKvv~uJT1WnKz?~qt)g{R=j3;Oz~y_h)su4ijMs9#-|-&+x=`bO zcxHWAp!Ma=s&`5=bAdtc1C`Rq&0m(OBjFrudKTfUREhV%A#w7=&92j)6^2?|#6cte zb%Ttd{Mc3xUeMv?+*H)dqc$DH`%Q%9+Sss6^9xMfU+!BSQd4OeJ^8O5?2e)2-0o?% zY#J!0PoCX@w2iq~V65GYyEiC3)cez$)4X;|tiJdoDH(H?Q#-%ZGO4n0lF;)z`|oz` zX!)rVKIJSE`bDI1jJ<%t40@LJQ*BoFfbmiKNXl0DllmXmSJwzPO+L3rPQQ-q#y~b( zg|=K`Ql@$T0*_gsc%a=h5H24J2sjhMSrBTeDvc6_ z1qu?Ug0;|{;>RY6FN#5Cg^lVSEs>W>BFJHMS2-YMNc*Uh6=tH3tE(KtvWe1&c2xV+ zRc=hS?%J`6xIf_c>*_?uWcp2&dP4D$G$vu1po%*Fw^I4@BeGG%3Ak`JnT5A*)9qy?aFTbBTaCE{-cygEOtK?W=GS|!=`Nh}B&2U1{ z*hRQrH&kU^tmW%t@>O}7mt$y|QuR|U}? 
z6^=P}jy0(dt4hY5?Hl2}$&Cv;4vJr(u04WxH|WL8^5Md1ZszS5&7Kud=c`sr;4&8V z$%guolTg#)MExIVK*dE64vOuH1VLBp*2Vho5nIu>rm?=$)9yIX6Ro!ALOAKy-sOU( z*Sf|h4u{hJw5qkR3>Exndmt1q&xO?&ExtaH-seU?D@JENNfzQ^H@=7c_GpPfmMV#V zOubec+e*>8sLK749iaExrp{CoAe(qHixTQKQf3e*kU7x|>ZR(O&%`TR9f)sfoC$F( zx@>#_9!b8QotoZd^T=vx2j}}mdW1VEbE3t*`No#g0~mB)oGVH3jb}+}YZY}~Q}G4m zq^Sf?!8w!Zx{9u3tPUqm2Rwq97faG%{BM>e9*wqO3~M}elg4@D$4)` zKOykLy72k>v;QOM+vAz;|MzvP+@0+1AStXnl}bqE7?XO<*6j-2MOt!|+Z zA!oKqEQyixVV2_@(qtHhnPFJi>|kcw{d?cv-#;Gv!()fn`~7~sp3m#Lp4auv^H?QB zeG;X*CDX6=``zS#-pL~sK?w3$YS3{jGheO-I83VLaW7JtyTm9hqG3v1_Ex3JW}N^$s$*c_!SmgJMWGm zxXCP2tc-{ULNO|b)@C!zy~SkL5LqzL=WP5;%bJ%o%${&-Nw*RLbI2V-jWM?5szVE( ze#-WRH*c!DnTBg^)3o%{cJG@4&vzu9}V^SQ3rMn(8?f%=QgfGnw}TO$Kz7c`~2xjKtTwM+g;P zBkb7~1Z)XiWA@T5qpla7)WQI*=0(Z0a4Nz%&(K_+L)hA^ceeRb9SS+4BDpEQ7!^V) zcCP2NCgDi)G@2#YqPO=O`O{iA5`Fa7Z;?6QRtn}o?jX3V6}fbDEbC;-)Ha}}TUM1J zFUa*3oBk+qFJV6{>$>u(#l#-@AEX9dm8;a zu0nKUm?hD3q07ch^~QlGo*Bn^)C>~b|Cw2{XDqGpLWk;IYshWS*p1)@RKd(45y z0z8ZF1BgJoNJD5%(T4!mj(Y>Yv|Q(!N5iXa2cbW5LP$pJA0`hxCtBA!K)<(K;UlEI#ED>oIyHkS;thyzDvdG&`IW}-BS1;hxc& zO!eMp0WKGZ{!w!|BCiokIvJrpBHKdj;qT-`EDGLG0I8b$!Q=)!D86G>(%*+<6i zT2kinroGci#`){3Z>v-XYCfhr7m}8)?>%1 z2Vb_E-fhm(`lGPX{wuq<-XIIS=SAE74lh_kIeyDQ(iO&sAc;?iBCUkB)bXJX*?xS2 zI51~Ng80JLMYQ*lut2B+W_+7*;IX$X0RA#Yb{Qt>^O5LQOrjsAiiEK?ja$c61VR}) z4e|n_gwjFKnDC^v9WR^cQ09~o`z9Du90FKdU+3@Q*u`cY)MF+S_jQ|k=6U3QpU2L+ z`y_QetlDM#(7BUm^^{=z_O4ak8iiEcG2YyF$&@Xh_Aumj^ZA%YOcba0k&(Y!zPLp5 za)5`sHqecZA3jSks2%59l0Y=F@P+7SIo;QUm8#$WBrs5iJRVSMX8i`qBdNa=%qh}U zeUHkjK4T63mHV5}veD3Fh#{chw!r0xT)Nna{mVQSi~gP_;`kIj>3(XW9%Q+`B#RD~ zu5?UlcDU;OQds&iQHR&R_^C~b8$f0_KO>#<|gP*9=E5Y%r8 zDW~=;LiJ#3NmMl8Ziacgs#Irmej6~@S4H7Tj1P_OR{9;H$EMZ`$eXjxy4xLnoN@}52luYeziL!tCwKl2N*k^pVir-b-9rcK%miqLf5$pM6q5u|; z-K)Q*#@=zaJ@}(fU%g2&^{V#+#iz)dq`*0`!vyN6$eRJ%WARjIaKQ~(S|xlH96?ln z_CS#kJRDllekH_9VZ2=9LdO+()5D^gEZ=~r4^ zc|3*plexhu(j7C{cc?HGFDLvFuKIzvV^Tk*OfLki96%>bwX34hwH}$%d7xa)DIT#IphNe7MyK3~yxb4^Vn92P)X_H}lgC@g{Kp3PbsmZYR 
zy@Q0Cn!{MsBp9cuNeF53ZeM#LJ3jgzaHOyoMXsb8c8F7jkl-IcIj7(*-7J5qyZ?;k zp5B7HnTsW|+IZJK>mNGq4)&;57+5m>f43uqC0bWF3l+*^Yl)0xhplrR`Tn++3!@nWE0{cev&;I!1_lhlnZ{#g z+{uGjl6elkx#bRr)h0VqeHg1~dU(Pn`Q1M2yG>29pTjyN4`~rb!I+t9n>l}HQ5;vF zJAW+-11do#0LmJf64e~i&%M7$-gd{njbs_IW*A-zbu*~V%*N2Sbzm*g-tC(ojgpk& zim;xw?NSG&5(k0XeSnk9Dd$?Ysce^Y02$v~gVuZ%8cn^Bz)S6J&EtYP)J`vGU$c(3 zPSV;0tVW*5gT97sPX;$S4)Z-*oVpk_vP_x-2RqDU z&uQ?$vymGc0=8S&JJ~5|z%Yl?kQ#X{iVie*-_HNMuJk?nRAQzH7|%aca!qm9|HYKs ze(}=r@^i6k(wmPU;-+R5*R+V>;}d`6IEcaScXMmh6WBMn7L-u5&T@+^yM)IPKJbVj#fWj@k+Oom$r4 z8b+i>@iXH^6GWpdI-%#{a!w!h=}(U!Dv*BDS^Uu<8@Z*9QBbT3E3Ge9qe+z+8)c^_ zAhtkpmWt&-+qTZcsAn@1I-CCt;O^fn#yO%(VQ6NvL&gr%U2>ViRWp`ghz$LIbB=<=!$kkE>KF&o8WaE}H+0wUOo z0A`UVh;OULB#)7~_9P|*y+RA57uQhW4KgyPEMe3#4S?ompGe(Xja_yDh0i$IIO8ssz>$ghYe`YPDo~G;e_jDIpX%eA55*?uI0la;Z+QKROQ|Za9bkKkH ztJG;?p;h2_Skp@&adFeLz=3}UDC1(A@~9Sr9;zK7V`y{MI@xZL%FC9;0aL~{Z8=5^ zj?pJ)U}_hBy>6=hYvT-8=VH0*$bEn#^FOVGX576w_Mb!Qovk$-xL*PqDoKY;d*U~L zMMcXnng5*1(;x#L)j3tvt4|XaB9Bvyep0{W~?a9tj3MVn&_16h2r_<}B(ocLF+*oT1XMkNq= zQopPW;U<71Y2<0|{IMaJi{iJ*ka}!bYE}PMOG>U_Y0|r~O+sc|JuW0MB@Il0I0|J2 z+J?|#D}A0V&SgHKW{w_pY&^rDG>MyP0x9)w&PWQdDuRB~%-#3={x9>g@{0~JViA=i zqR4jdnzeDBesWn&{g_Isq{TIiEomS@Z5!~ZraoNata9q3+_04>p<@UD?E?stCieRV z9|(Tn77i?3yG52Q}!Cvig&8v>P; z6g|mv=65?w)zdmeucf6}Hg*_1PpXQ!Qsr2_DlnY&OTt$>OKq+AOf`r!jN>$Y@$WXK zmSKdGQCDPdjm*l8-XpD!3)0K-$z%|+IeLz+4zdFbRJ{~^TX>}>lm_42i7sXPnzwEO zxA-}%7;tdij6&(}MZERo0XaF4R9o4 z>Alyt=#A}LyCM8NahkTcCFynRlf>qK<&0PYRhcEXwCE+MgD?BYr4F@JcgeI;hGu*W zE}?|szzx~3Y8xHF|79Lq*Py!3pY+G4pAL29%R`A%wKP0qxS&JDQnO!Hom{qs-LS)O z@kzH|M}8dZGtB^=2s<&ebP*xOd=zeXNSJnu{AxoEM5`BJrey@FVQt2tzrBrfoU!ge zIrfUhuh8rh!ho%(9QO6mW;l3PT=8y5^hlqZ3z8m?&5}^Mau=>#=t6CY0dw?0T>($W zQg>B`#W5trOMg$gT|E?oMWfe|=L*s{UH_F+hOs{6esB zAH>YcXz6*u-bZo{7$_ST0~xq-*(I=#PcP(LWh@vgS{q8a?DSzb29=7y^E-X@NRcML<%B=6a=7>teT!enS6+#1$Z4#;@`6Cy$_Mx{}uV$7spf4neg^X0WKwyhHnaiPnJ{ zqU*VK{u;5lxoJwQDLGIi+ePu_#PPushRnJ+rHxLfl>t($1XSi6nT6R=)7xG4DPix< zCI|UUg?45gFHNeyh%*2A{dj)y%^CTD`kV~X26V4pFG;Wj(~SMy=AGO}?<3tN>qLuZ 
z=m`nCf>ROuiET&=6($`nfS%k`S%m^o5J;VB~eV6xAwo@1`T%C&ef$YRk5r_mpxs$0{2AEBx)zpp=T zBi+&eoc(-6lei<0d>}obxGB+k5%Pzq&h3)p5ISsO+^sG3gr4l}4NHnKICcKJ`$^*K z^A(RBH+t$MF53zeO_Bw@tj$LctMq@b)pENYHf3)OJ#j2#+dqm~9q(k86isyUf5(#tcmQIUog9*u(wYN6C=vLZqBy3Ok&G7YH|okv4(kD|Q`o6P zt=9>y*uPt&`%4?LjW-FAeD*(}L zbM1a9D~yx&ZRCA?U9aKGuXLLLJU;VF0L}LI9$?Es%36RJ_<{lGk9{dqga+1FIC=Km z_Pis}D6|xg56lc(CFfk+UiCBxWP2l;CySnQSQ7XQ7riVt(foA>ckX^81KKJ@Md=cj zPrd!^X3IDPH=hbbvJ!3mO{X2#KWs>mpd}jMLT6x5HBAHSoEMnsB?E!VVpNzBrzNh1 zbLhWd3SH8Y`@N9edYz54%Ud8eK6h?--~zYw&O^7RQ(-iV-?p7eO(iW+#h8Yf(H3Ri z4Q}jV(`11WnIef~ce&Qka8|F1=mHWEpjsh?uFe1005X+#T_ToB4}egWllc|?B;|O# zbG{JPSXS2MPJ6O41&qb=s*WUvl={|WSJiKg3|X%(u9BX_z5~SADV6wEZ3fS!CDLzeB=9X+0JB0B;p*mkTia!qQ)B6Mz+tX_edvc6VvjzDc-@Y^ zL$dF@JCBd;gdM0d3a|@YAwjQ9>1L6>xK6%``d6+14WY7~YM<_TprHvQB)ngdMlY21 zg*2;AK>*v}@c?Ha2;(dt%0+WXb9|<)Zo1m;r#DWegS7xbnhu)GND-ul95TnUQhk>p z^H^ZM#)F-GVS+ZWAP}s`f|tkwsii_n$E?P!BXIgw@RmvD=esYiV9i7efEYe>KNiYe zkwr&2Bm)j}Q35c9zb10P`Hl{`xen-{gJu0uEFeqk+0#ou2&uX14^?@5DMSxu@uJ)k z1WiAxwY&vcX0cVw?V!5t>rfF;;7z9(z^1|g{Wyeece3^2AC~nLNoujbt7GI{6cT+q zj0EL%Jq8ppP@Z%`VD|BxB}R?EPV^kgJgToTFOAT3ngYV>EF-*5R`n~G@<|#Bc$|u$ zrz_O1Kdi#7KP<>}Di_dR)8rK>B__zI(d&fLH}eVV*nSCyK-h9Kr)nO55cM=lhD87H z>+7Z#+m0afgg|~qN~u91J|K)}KiZmb>U#g>A%|)Qwzs$68k}lpZCML|F$rR{4nk5^ zRbS223tlu6^^o5!kq7T zYz@{QG4W^9a7=z~30A5FJO#jX5DizNB?XDkJh)_9{Q`l>j2i7|{qrfp>sTxNSMt}@ z4Y#BA6ypC-84Nct{P{p|4z?`%C@|G?a2HZm9a;rT(cI@LJ`WsdcGJ6n7@C}b`Kql$ z2C@s(feQ5eFV(%Ar%{5%U|8|t4FjsfYgIH`_{=&wBx8xBF3FfCnxkjZ@Ax#;e{;Jv z;bysYEdW9?|Fe$H|JJtl7CS?que1MJ*#_g*$}b*=aE{m=O>|=9z}m$Vli{r zdKPv}l$%Oo0#4Dv($b_ZFTY)hfRTIFF)c^@=h@1JkF8~bFL&AN&e&u`QqFv&!yzu5 z>O1=c>1@@_mM%DAKNpV^lk97zr(Kil-gNDkeuu=HMM^>S8pcPI-e!$GZHKtG7V44e zse~xC9|@5=rr9}MWoXGwh4nRRg&Esm-RK1T9AwjTb8f~H=oiGqXP3St8TNR+!d%+* z>b)f!b`R!6nBHRJw}{zLySd8EFDFYsYg2`NEA;C!`+BcN)BVT%9t?#Hhq04+Bp|Nx zw?_JbCr#cj`HeLzF1KrjYgRAqSiw(_xQ7_=HY?o6$op75znt_)?FdYv6A z`=#~eR_lp>23wvNiM-w@bYb?Zz3$C2 z7r)lG)7x2E1qruFu`vPg(1$-CbLN2h_LGULOcJgJQNJ?h 
zP!3e3w*!xrCl^`2TyLSuTS3Z9-IZGGkFsU&Yr47?^J*246sX<&MSQXHc4isU;YvU6 z*XP$0jzAXT;~e{puju>Y(Su6yO{4w0f1yt}7f=3lwZ3f8+kL%fsetvq8)}NifW6OC zoNN}zpbRW;42i;~Nhd|XX}0|%rI;Sc04v|3d)G<32>fkTD?fHBxCaGAQCalY#0ROb zQAeuvoSZD8fp}gsl4=U^b}(609VAHDgJ3XHl?UJNEqiq#CrVJ@FjAG>{_w6dJE3#A z+%GJ=wvHk+Kg2{G9+`lPxr8U}n!3^qt?FZB_>`>@Fd!rT8>uPgBb9%!8I~XMYgR4>Uq`T+VI^pA3 zrgmBm@tMFnCtz!dWMI;Eq4m+2W5Gh3Xv<-~niX-LcBgiD#Ux4|%j+RK zOX*sDBCS`It@Gy-49eXubNaU~9U2;<-hw%8*yriG)F2|)4saEna?f239?LU+=<~(S zigEau>|tIL!ew(dd~pb3(E?yiJ3w4?+&nokMg!+7v<7em!y6NlXdcEaYAZHRO#KG% z33ssLLk(ij#{wLY@RuG_?*eFdqmh66ISDN67y85y@nyATP=M9fZ|= zw}*n28{3$bW|v4nmYf#6T}*$~Vi>WHnh;j#at*0du*Tga+FhOn1Xu9jbM2_s|#{z8Jx z2|Gh5W?XtX8Uv3*`=?H@+Oq5{81xjG`BzrysBC;Ocr4>4 zAUX{>iLa#~C+d>s*iv;-8e@6(iT4o`$S|w>)bRW$(b?GS2xwK=(i$E1 z*)cYyPwR|HQx4)<`lyRF`|PRc@wI>D6l}DRw$r2idZ|s2yFnL_7XY_7mVEK4a-Lhe zhMWb7P3HHKm@buz6P7~1@$tTx9>k>$CPY?Y22wauf`GLfnsj|14M)ew5^zKSRWl7E z-DA&qK$V<77J@z8nrGo0x|ROt`slSu{%6mNeW7}Yya9$ZiNJM!B4}-Lo>C|LOcdJ} zmYD|@gK<<9vnxzfF=hmNZfRPzk@vvdM<7rqpw+KR#L-@?j(6%tZZ&-K{O(y0s>6$SnBf8@9peLE8UJQ%I^vqh(YA)o37cwUDdT*{TZDA@yw; zKy1H-D*#Zfdyrwv^oRPX@tL>G`t0KM#KYfle9>WfC`knK+Dk4PSIdK)eO_ zVG1zI_)fyVayaP|WQD^s#_F@NZhVn9#*`0Lg;Jzqcj?ZAad z0kdH>I*g^}$4wBnk7$QsU)EosS+n%h_k<+J;FhDkjqr5It~Lv`F8*SB#{1Yd4?PR|?+=E-Bz?SR6 zAM#}8Iz*`_MRI9*7am}e#w|LuibaR5Y(BRSxg88NCY{`y9zCZDjK0@ykyD1g{hY~b zaL#loOYsAF75N=RG{f8~>U@ypO4wJ7UvB=*N#1U=S)l!C=ZpQkW8Ag&RIe4GH6Nj} zm!lr%<7vH-f5!W}NIy(7QzV|MymVvLw4s&*>%n`h;hI+qz!drSGaDqT(%n(^ovNId zYcOCgIcsOg%8!c*?o(34d%AYEfCZMqsbU9Cx4bq678TOja($orF>9lbQ$PpnKAlkg z_EGKAdi%FrwA*{!h*BORfIrfjMaaJPC6@<@7NI=({1x%fc{EvTddc^l9i}rdi zkZy;v=@dMCH0Pz-H*;(sK~w)jvvVYEbksclqMutc9WDy&zFLdjegwg4FzR8|3namm zPC(nE78Z*A-D%O_5@-c{z*WsSQq_@J;3===^^EsW$CY1vAZ6v{sJC*@U5B`yKYCC&IFL^q4%|BjMKahkn36&(eG8^6bB`-z=!wgXk^%OWVGz7)6!HV=w zMgfQFV=zLgrCEVHBG+P3XT4;!^8-eg48$w`+RU@2SK2-B^uxshH&d>`cyR!fT7Ini zC^jLWBE9csfP~l8HP9Lq6&w3aLmZ3oTc-h0m)h8s{S5FC5!R{t>kDbR%PUsH;-wyv z1t23e2dE0V3P6LE$xOK=*>F%BY|kWArv^RPReIdBKv5B)=9q!G$=>6|JwyD&!GDu@Z(A70?GMgEW(MUA 
zJXNJ=4VYH|s!c#%>VT7+Gx(Z6J%J3g6%I=6MM+y|#FdVwsf~Zk!A-vICk}sD3cG%i zP?&HW^HiZ;6Jo2ifQ`o-kpeP>_3YbcE3r;d z3|U#C6|7a_L^>)8Uf2CA2XZ1ZGmugIJEFt{181}h&eZ~D68H9#AsJyogMw5 zBZak+mC=9Y9{ww*A|kuAj>JG`b*ssyjFz&H)ar@QrHpT`AwAtj`g}L?FWSe+Cul`e z-H6D!CG~yvrx%_;#r7o^(MKd%PcU%#=Z+W^9~Ip;P~KfRaF$W_Y1XcT2$!wS zQT!tP8aEe^vs5tC>sf~ewpxH`LGAQ z4WVpRJ@h5!;D_Sc^72YUJJ+lE%^xoOuBMlf9zstUp)F0AWtJ1g@6UG2g)Ew=hNN5( zi0_S#c5h$*>7cMg1v(jZNXJpD4a?0{zx5P-W(@gt$Ygj#N^#tK7c~J)=wGi`nd55G z0hGp$QrJG_CtEpQNHq56f>M7T7UDXO&*F!Fl*uympw3!XQ*n(-+nwU75 z{*@uIn^^%nQ35|043a_*Nim2A0&wUXMe~-h#VG2JCifiSXj5x$$i}-F@Ku_;27d5C zIKF)Kgu{90yCvGN6fKS1?vN_#Z{OJ8DaPcTy5#*K)Qm?{zKqP3|eYt8GcdG!lk?@yeaR_81+40bTS)5HCgPh4jSMm%nIMN%HZ zvaXUAD(BcjuRf=4a@jOBX*4|Hf?rb?3-hoGCy@k#VX{Wy>_<*S$CPJh1U|B~zfecz z<$AIF`gXpGaU1`~PQT^CgEwl1v=0omAMvmy0oa2r=;}c9^zq;Y@Ii-z&)bF^69p5L zlK*TYuNqLGmwt(wlPZnF4^+6sJNk!vxw#yC<;>uG?zgVzZbn5Fyrw?a7@np5fi|R4 z%z~4j0|H6GZ0UsP;NL^To9V1gfU^O`cz%1FVcqKueF%n(XZ z(fcnyRnZWG+)3bcG_{uXNNi*L zQgOOv(XfDPQu{6KV?$G*qeBncH~+^1X4nCmm7VI@6)4SpMY1)SlNT%oUGY?r20f-& z6XfMfqWPrT0F<^ud#}#~*VL2f3NnDv*)6%sJ=nPs>PGOcYd>qeQj4S zhs7DI#-{RaiHKZi7y58aaD}M3Yl5&^R_3Ri9dT)L%JqPYF`xOofJ+9aP7pZ&&rZX2 z)9XOpucO_DYyHv~Jz>ZCks7vJR)-&c3tbMTfi0G`=E&pJ8SG1$X^iz?3;4a$K+r_o zf{qSI18f20Gh%Q3EBE1Fx%cFs>>d0N4n?_h@5gUkK<{txX9+6*CG2h+1EHx|8%1{p;(1(wKRGS10K;}hcn8eA>59uTWXw~j z$X#23%_d#-#!b>hzI)#2tY*=rkM&4UJ9b;oqTiO6^ewQlK@zal(2jn{zN)YB$5Q!) 
zc62U)rk>-0#{1ie3d9G*=TU+A5eB)61bFeW{DLT63aVj=-|e>^B5VUnkZ;Rq|B=h^ z?NdiQ>H$<0;=LkZW1j}o>ik4NLblkOJ7=1Z{svZH+Z)4Sqc-|+(7meSTI6hmvJbbJw|%>~Ch|5`LDd0#@1 z+~||JnL|$n9-mV(ZaXekSClh8(KueAqpR-Ym1i0Gw5wz$gOZ}KT^)H5VV&}e% zuMR74gA(uDSpe;*SI%8PB-hIeu%G913hd7^g78Wai3nG1Sy`Nz0za6DxQEd?v`Tw~ zfZm0WD=&MPK{qAmJogT%{!Y||FCz8qO0qxo$|{ED$u;r-;0L5GB?871uZpMF1hpxa zM~S>usF-*C&%R2tcuPE*CmO}1p{1FnZuK~FbV0L48=Zy9m9@zZcpYK`qiQ&UtF9ojUT z*m<|qw&pA(;5t13E1<|!rS=?Ls$>V(c9i}%rvmSZ^UF5aZ00kHx z{$*VSba{hjCxwfRk$*N&UWwc*Kq(FIG4w-^$_7`wd53Np+IQ){s2TejF|CF#Y}jieA-)rC=u-f18|kkIVMKPYr#2Z1$8BnwY4M|&3}nGqumP}Rj{v{6 ze=~6i45|SdKXao$f#)9Zo=BlH*=vcCr2WhtDh%oCqXuC$fjS$lc?5gBjXJ>)VmrHb zurjPN44ERoF#>d|P(_O5APxo)MdMvq*Y&7iAl;eQ1>RXRR>Wf_suNm?&m?4Xi39Mz zUSFSW8Wfqn0nRgB?#7QGzqi~S#lNZsd_Zd@4W=$YBV#5;d8P=~o#0B3-*O9vgYOpI zs&2L+6t?igr<#2NpmQ;_{khf`%S;Wl#_m+0oMI3)KP7wK`Q|lmYHJhlLjS`gZ0)qK z8OTjDU5`7%MdULQjS`bI7$oj=}D*q)EHxrPuEL)fh|flO^Msu<{gRt zs2EE4-^{a{i#Lx4cZA}*K!*?xuAIQ;QUttnB24(Ye97n?cg z)tc+aI=YAuEo6FUe9ZJtPN5+Mu#;e<1p#FF1?o-z9PPY6$A7Kl2aW z3twLalyq#M*0@z@g2f4xI?k-ur559iMqe9uc#B-Kift3v(j6B~drx%9La1VZ4=Dql zhh4ztzI{Rzx8)?_S+)RaW7?8~@L*vQtt=@dK{ZNfMCPgH@h-Xc@5%J=_0+31dwtX} zfanHt*kgOY1_sF%lmdv=_DbM{h(L4*aiZ#XZ1od^@D-W*;s(rTL}#@eJqtr9FoB8i zKpAneq7lh}{e#*zBvX+XQ_A`M`;Of<=obd&wt;OgpmKy?=n2FgYOLdM7csswaneULvTWCgo^zmFOq9DTtjUE=C9f z{gP%2TAQ&$qVLd}*CvRR81F9GK(KyOgZ zGmC}5D}fYER&N!M&w9)^&f2G((}$Ep@M*W$K*5eA=6(jRQ-C`lyxbKK3xy8r7kEA9 z|19KZzRUJQ-IOc?S1BpxH-d=5$-7hJBrS6KNgUFS0;p|mI6zt+kL zu}aMrzS<346v~&|Ah9R~)QyfXT*s%Y+Qpsjp@woP1?m7Z8)-y7!mNnVbd~0|Dv3(0 zaIKt@bP9CI_X;wY6hke3P7cmB{oI~%j6LkvEU=IEf$LJL7ChGr)V=R!`sQR`t5feM zjARj6=pNabP#|h`+LQ~H{39{`OpiihXc!0?4=IG>+Ci^=lICq@u=~N)Kw_}&J%Jip zJgJBVtt7OAe~Z~RlfVmw?OnVDEZe&W5(OruUKITp>-nB$xkXk$*v|m({6;YcgvT*f zNVle4{c`(UvEvBeLrn(s1#|hw6wNong;t*$*hQ0eZ=$9=r;`*MEC>GInlDbiTI)7$sOa}iAVI&Pzy8yVf zdgCZZk7_5xhBoIEL*)<`JqOX9Nw*=XQ}Tm~k?d~252j9cKSpZZy{8X_tuJF?gIqK0 zevc6+z({^RlZ_s{_#LC-Q^DQ?jCrCJs8~=-K#`+`Iga7>v=8v~3nkcLzp)^5n5bYX zxCuVZ(_2}!T)wnKJu*WT|FxJ+uO=qf>Q#uY0n8ZRP9kXOch 
z5lILrR{z;0>&Mm@#@4PKLk&b=Iry0HP@2^G)*xLO1#TI>$uz6R=wYC@sGvNC-^hF% zlx}SJU%qKG(VqSTX5ZAhhED55kgOB*?p4Nwb>!mr&5Qs?5M|FR*>GPiuO0t|f#&3g z*ELE`qe;U^o(Mz4^JTeJ=yC|>9Dgktyt~DNV>qjsh&m#o-_HLZ7y6>s#=?3;J21zS zw{gT+)zd84-4p(=oVH}32)MDuO%7!?mgBq*XWLQ1N1FzFSfH z9O4>`uOvmc3F5!%pE__B*1-j5`i+KBC&4gI#TWC;gM2_MsxrV)JzRtS$w&g@fc=D8 z&v2}bcFdMuxJqGDsr)rcUJ=YEA?fweh-EzR_*u`PEb<>}*j?;;ivDhnP4j?^fHqPE zA9NWKboqWXh$(%NBtnDPx9-gIZg&oTfQx_)=Q;Y0{rOp7UV9kKB2}23X-Vo!jm669 zZ_OoBYyNhb-Z0Uyg*j~O1#69?b!D~wPl5ooVEaijv<6#(B%qxKD#|PpHbQ{e3oMIP z?vqr0hOxXU+qZ;An=g#1p7-;-P-*h1W^*AMK=nBaFx*gU+Ti7$fY79y#3dR815rm2 zU8(6$_FoSR_EAJLXW3Kkz^oz zza!!)4Ag0f(GQ`SRev*Y9ap;%{g?=c%oNZ$Hd@e^c?eM#m(>xX`9fdIUu4+Ul;uA= zQi-0^`m*<*%a;ny%w!ZWw}X29WtfwqWo8SQ2Bqp)hAe$&2k-IpRdgEqaTauo9TwexAtLSw0j*w)ZRekQT{(;7>Cyl5xAm&-<2sCZ1RKFNT*5mZ zqwjZl*a7CU*`wIeN+Q+!7kkSk=Df5d=5iSHH#X$)Xh4cSejk07;yk3=y>qPQR~j`_ zWBy7c>qM(e}Oi=)s9R+VUSF&RvHytqZI z`7*SztK$@b;^-qT!QA|jU8=Zk<_`;1k5rWfVg-Htn2^LtCDETxgcE%BHhCw8!uX(r zOPAVSoflM^1XByJ9UMGQcj+qsNoUZ`16jznx4}+$1y3%*RYz;zE4WfFr~C9uaT3}A z=G=ekWaXH@OSR(*j_BjZRf+9+hG=2j%;`~%!b@vqAaDiYBK6+V9>yUX?AadX3D+w@F1G3F&In- z^htxE<)9?+{;=rgD}G*ir;JQlDtr=-Nj4(MVe@qb1Gi*G*N3OJ{fJ7}!;&Ca%{)m+ z#~P?=_^H@&^RBeNbHYkXvm!5oHo`6*7lY$5>Z9N0`Bh=9p9!s}q~3cA`7WV#wDR!5 z?<4xcK#(25~34PhGbN{%m=6ednZi6>C_yw0yOq z2wFu+jI6f8dXm+;uk!g4lSE*4v%z2SaHbCf8K*4k$MI_+?~9K-xGnu-rDxviAs2CKp#3`w&PD4Jg%-SL?)F_9xM2~67a(Pa50dAwX_#!L&pN4*J zy5{vi`SqTD?Rk#=`@7M$K@dE+#IF|lJO+Q2q9G|*522)%2idottsG)r*|Y;>%@Cc! 
zn*yMWU6i=)V@=R^(g0Fjv|%m{o^)tqUYzU)zwV@#h27e)U-FiPLm*vVjTKk+Cw4|_ z{<_<$GC>U=kKCvmtz9XdCceOkcj+EDFLwdtennLvTv?CsY>5sbQ8Q&z=|Nb!H!h!xoXam%w`UN&s1jR&u!)|1;Xi_ z4j5ZU2#c74+x^p2bL=8?IP8Z>=o;|bDo_hQlv439SM;s5+I^S)raa!#Ua6_`6)AkL&M!!^iA2p{NVz|?7i`tqdhHb?>ySV!{hjnC;mZqQA^ z40c}76$Esh~Or;aeeXt5*(4*hO<8VnGL5U?!@8wdBG`QS8)3K3UK@6kAI=hQiA-Xl?%VG9Z?u{2# z)Ous4ct7yX*rC=Rc%8w6F%4`iuF4D|y}u}ie*+SPi=t1z-W$k*bg8(5fGN zFa5yA_zEqTa*0M8bl{B_E&s7x-4Su#99n>8(y$~7eO*EvrSC#S9DKuRJ$f4^3jEchflEXu<@iKN?hl`iz258svgC)-613^-04E1u?<2*N)JuwFRAGX$3C`(y; z{yRE0H?cocC4UH)krwTk%M|_mHQXi3-;H@GM$2gT3+B1_UJmQ&ITHD zfBf!W9I~`j>es4NHw4LTdBiY{IDp8=vukH&6lZ!x+JY~)Ta7<#txoeoWS{bCB4lL+ zQWv8$si`$;j{*A;(f=dq+~b+-|3B`&b*EBRNyuRxDaVjP*xlV-ayzV2&MbtSEn!Yu zrN|*Gl5$!lryQ2^*~)1Su`*1=*f29=vxAv!_wTyDzsIBh>S4LA&$a9Qe!XAs=j%iL z26D~H^XIAzmdE2fcO8`}SgOargh*vtgKal}d~B?(UpvjA)op;|4x;y%( zgjDKJOeiNf>}B$T^^{%7E_ETEctD2po)<*O^4g7U|M`&VoNp9Aw0CnofmuOnY<*Rg zHE8;RFHA7P+(7@+6ysYm<4PM^at{TX7->M3aVvJtf~vFwEsirN46DpUe@OtD&Wj>q zBHt~KdMvrmv0{XA(Lju3uEf9Vj=>wz;A?4Fs63BQ$w;y;^_`uG>*#Q4Jc!{Z8OdJW z=Q>_N32vxu>zomWe41nvj)=_ao-TGY$e7SFtfV3Y5gBjA_v2;eFU~~YOdTcf=BpmC zG<%>Hi{a37`v90M7JZ;AceW>so%|dF*Y75^G zUo@W(b*V|lCo zUn#`>TISvK0+`j_9q9cQI2|B_Yy`o@>P-BnGENU2F7^ouPrUdpPZggXqR< z1xkyA5%atYRewF0TWK{8uJX7y-l~y{$`FDz5QI6$=bsK`*^OjnAPIjoFB~8!scmz_ z+W5{3lEK2?#qTZU-35w0LiIdf%3)##rbzIS{&Vbs3efZP+a<`!5G{a^jql3berqk$ zkd`m1PwAK?ql}glFSJhwyq3N2#;DIIJ=&DM^}tw&yi7qIx|_Wr3Uw*)b5;$#o!ael z7E!5p9Oja7JU`1mpu&4zuQ}eyQmvxTy8ImS`DHXGw>kdQk)KIgZC{2MCQbl1`P_u2 z6R+~T#vnt1_lg+?oAalVzChJQzX@Je$Lq0E8q6AB!g0*&aIcusM+KS=%0ikF!Q@2l zR`>PRwv5fesFPXCS0SoYQ&V#=12% zCCXoneSH(ZcsqC*)lb}AqPWsr4J0m9pf=ra5`9rsK=`B6H~cx02XIKY=n@mI?vaVp zOc5PL5+Y4#D{zT1UJ%;N^t3(Sk{i|oUW%b(R)T(3t{!VZi_Hk3Ry>hK%Ly#jRh(to z>+nH>mE+e2K#cu=hzJuVQ0eVK=Wo8SQ2#CXAG%qEZ#IFO3mC4K8iwwz-PyoV$s*w+ zrWmBKoNM)zx2}vxhO%gh@y4GJrEcTZox{b;xw_bC{q^0F7J_tPv(hk<`kz+I0%i=) z|CKtzd_X=V`L5`Q*&#Y{|0HDc0J;&ev~EA}^$2R_du`nB-yp3!4L07492-Q$%-6V7 zdp+&eBIKI*r*`W&c$kJ`_kWxYviYffAx^D58t)G7`su0Fba1KJcAw&hE^o}Xr?%vM 
zIF6%`%0ajv=LFK~dW#2)NABirdUjltByhev*_7RJd30#jnogjT>01(ow;b0g{8BT= zfcR@K4TER(X%nzKyQ@J37={n{Kc&<@NYq)BjLj$?X5Rk1S@-i|Bq?r-o^TH0SCm-w z){(|9zwv%?bK2)4F>;{3@k@)AruthY6P|PibZe=jVxE@a`@KOvoN9>>19d<9I}$Ol zxt_YU_65&>&(D6r)0Do*T}A+yoryc5#TK z&~uP9UETJJ(_Ezeg|ghPequMXvpuuN`jlT)k>jc`CSXjw(G0}xkUcjZ<-|u z5lt2>P`N^HP@_%xRj-YC9aB>RBnfJP!zBWrkD-p$#a^w+NMLO1o72AK5PhTF z5gqxdc4}1;gl9EElHm3Z_)mf>sf{4{iUxjHJ=VWr{qAUMT;U&-u$?Q0D}KYfII&r) zra+Acz$VP_Nt98aotIxosiI--QNSht&9j$c6B77$)=e?(%^}(nAJR=~GMA|4BW4PU zH=psO_l*xsp4|ngxNt#)fCKxmAXv2vw$RFkBYdp@lu&S9dsTIZ8BQ;m@%FIqb8KR< zXCq|KXT&MV*yo{J!lvp9Vaq&_2@!uO0N`{4PR;y~S>^Zq^^@0kanAolqXozo`CI(; z3H?ay!CBj-d&nQrd`ulixa$FzPZvgGf1#$eyIS)tj|9oeHYf8FK95yDYCge~iUWJ1 z>=D>6;=gQ$Iy~>)%^A*PLfCA^78eX1#dRQ`JMUHLXU8la`P321^aKPzziu~iUV?FS zn18fy2)c}5kfzxER!>F7Nxaogl=rQLt~5g|I)Rr%KeO}jvb4*1k5R&hpwGzRl4)wS zlv*q+L5wWj%$vs~I&{tM{&DUNRk^YLo7jw6I$!vPDIn1o~Fj-`DzL>L0=(55>t`tX)zT$67UYTE>a-OUP*@=fH+LLKwwuVl~Tq+gfhtGo4mu$q9f_?&GD%au@n}{g@cj zjMEX@zaEn%fPRKlU;_&j_sS`3TT6xSRiVdHB@=5)Q-Uc#_RkViM?jaSaq;62aQvT` zKBskG9}7HRt>x?aIZ&hO_5&(Qjq$znut-Jn<3P}?_A@0j`u8MYnrXi!gQen|%EUou z2JY)A4Ke)9ovbO^r0$`WmXOtD@8W8Ojh`*nkk}hh?p1wmn2@Bw4N=;zI-SuH#pwhu zoxcs7!>WvZf&AxgQ?DF;sZFD#JZv+X)E$O3j+=qpm}m(3YlEwD|7??KjnoU1FJGiP zA<5H;i@7sguy@_)IdgY6cTjzx(+LScKhR4w(WLh#sBL>P)YVz~Paha?3vg zjyOHVQQT zMB0h-q_40Y%Z}%5LdwXKu7ODHP$1j+;oyRL$5|=-&Dcnx_Fc!E#>S7nuj8u`TQ)w0 z_o1jVYPqo_3QiaeOUY59ovXDy@8Wd&i#Ho#TL(&ox%G&hcFro;$ZIh&PAKZw$cl@BZU$gPdur}w2a+09 z^|MrdZXzGpdwlF2@5YWN zwc>k>jM2%FF3*RvUl;ckg!w%FQxg(rUT7i>0I`a6x!W4(LBxmVNmGkgpLb}>(ZYh~ zc(5lMw{xG2pQnitM z1MrnzEsn0mDZ;^?S=}LpXVC#0D^+3L5$h!v)ByDMRTs}Imkl>xt0;o1%=HxaJl(1l1^Br8d)V z$Obxl*6`)`muTDkbe}|WBG82HjOhJ!qgPP+Y=q-ZINwVO0n$DwcSmu)A5wW+0q;e$ zJT~+Y9I4HCI~$ZfT$7#Y^!kh#1_~4I?AG=QpJv>{Ud*+;F=9lp$B1JKIo5TZS2M1k z_cv!ViSPN4oPRj7n3VHpG*4LI^uOdL!BRfU11~VZe7nV6U&Nmp$*C*fV`vxH>Nfrp zxics^f*z;!qKk4`7_hl6jxBJHy%pIZU7$YOp&gHOd3&?F5AmkKu_=9R2dT!7tSnqH?`xPz$hPn`Rk-Loh7^~qPDb2=nO-dDJ9$y7DLJGX-t&1gPe?GQv z`O2J@qLP4cl>ZNE+|uG|c`eb$>np+u 
z$L*-pp{xypdn^hQJ6am!IRt=KDoh6O1lpFe)(FV4Vfi|-@_2oEZx}c$A4B*B$Cn1H zMpjP@*=!`|oi+#hKnnx`3Y-ZG^|7G~c_*FK(7Xj8nG35hdDXn1B_p#1XRHgIxR@uN zM^GU@Dtg{l>65RcTn8t#_+fk2X7U;}#I0b9Tpz|ix9@bu&TIu?y=Th)I2(HyJo4Oa zIHR}6Emr*}cO8vKLj!e_@Y5sK65KqQUG*|ZwS4;r9*b+*qG#~L72R9*7tH2GjxLGC zt48{t#7+#WA;*d%#&pL*64cF-AVOz}Y)P)Sa$F0G-{{`?T8Mo3&@6V2uO=k1od8EU z2Y4KFT?9)&5#X<%l>4=C8jA6M7OERs3q+f!i$I<&y~r-q*9ZS|WXp-)#!FX zm?Zzci4wj#Na(bz!qq5{GwKl#@VDR!8^5xQvgX?L^hR7t{MCH(DA}Zm@MibhlF~EU zHirr|F8o8|f`=x`RS3^st~ffWn>RD~)r~#?fZC$NqGD^sCw+f-3~d^4 z8Dl~l#sUfr*&5HobgQ#Jk$+BqvtQP&-nE0LW|U#k2Zgx;>zwWObN|d9V-4G@6y;G2 za~0SILXuW%ua~1BKzE$^$ZRx3I=&g}N}#;$!ADQ>eJsUdVa7+X39pP%mUuZg*a!QLinWL!EHYu& zM2Ezgmb&p_6Tw{6ZiK_F&TT7EyW3P2I;`Yk;cSZn-?T8{)q=pa`5PYm^vj*)K(lbM}$tRo-MsT5`x0Mpmy58H5t2+j?o7UU9B>Msji)3Vj__PR6 zS0B4dh5;Wi%eXL5z~8fY_lkw9;Pqm^h2{QF>2pt<#=8@+QImBf8LZ#(vVZi&)> zI8H~W*!6Al*2jQiMCPdkvd#zzql3See!&S&keSNtKpRusyh>+H61Am~PzKcA=J3bb zyySV9NaZP2_qxjvWt!-dHJA@PF%!b=I1>NoJA`~!=i#PQHN~qnHOZ2KkU0J-$^c^@ z)cEkP#Qoh;G(m)rP2@6i0f$z*wYUPQwbcCS^ZedA*_S_@tMsEKL!Cm#)ar=yQ>eS~ zsmn+*qz!QMxJm$p_iHm)h8w04b|?@QO2e=%Ebw7N*jX|NcnqJxY6wrO=96!+9vrDB zbIB_ZPjM?!Vf)%f?~jZ%fC`m6ye>frd7|ZpG8Oac!<#S>C1EQ0t))Qo#3pj5aY%Aq?PkQtWz)$^c0(Tv94|iCQ(*>4o zzg&ihxR&GI>U-AxlJwpw7f#+t;ye6_e7PB%V!Ok(j{s&hI#1L&dcgfa8s<1jl~$Nw zAhArs)k#wGQN8WZ`5Xl}SP||AM^pCVEx*lryw#a1IbrZxMdtNlezUu!ETjExgm6Op zAob~M5Qvw55s@8upqnhb`~|byDAarizEur6C7HphlJ+Hbd;6r~_yDx|2@rCJsMHF) z;as4uOmVE(`zthiklesbep=tAcJan%@z@d&+PO(3j z8ip+~OsIohLSXu$QHQw+Q7%y7Z%x~_ipnc-(f$3(G?w`sGRQM9B5mMZLvD%QU+qK5x^%$vl z;N24p@DOwZ0toH7E;{(5;p?0oisJN?OE}OAd3nBoc0s7dGBFD+s@T{iv#5D8B7Lb7 zM_fIT^AE9_nOvagBPCSzC#xAoWXuFHIq5m)=3SLsmSP-@ep4MMYxnw`b#B)HD71@pb2fRxVJXe-;{8Fxx|?aJH%o-$Pik0$5$@RYRgeA)OBH9Qb@^d#^#)0X(u5`2 ziafhU@&h=&dp|wRcoox0*i5iO0<;irGX&o!&8HgY-=oIFo@hjo>FlZ<7m z=%8MT&>T9WAWv6iur7B{UsDWLlF`ig^+1%Mz37V>E4Jf|cf`0d>A^;*s19KuQWtaU zRcC`KptIm4z{z(j1cki6qCX7>trHvwH0A(U92&&AEZJkks_1?x(oLkCYqhneB!Jz% zKR9h%0p%KrF=Y#R`>YbhmEk6#Mam<3(GgX25UNTd?g_u$pn3(|8d}wwzf3F`dfTf=jc%2P>yk(^L 
z_&u6%7GZ016YiXT{!F_$D>ExdoOf`vyCE*Z1h7&IlAp!I`)2wzbXG6{Zz!;NNCF1n z1<<98)YDjAzQkQ;+SZ1ej)b92sUmIrb4KS+fH8nYXSN2Ogui1YkYsQB1K*S7z&GYS z1ere|Nc^~RL1_D3pb2dOxZ+NSX>u~8LU^GS4DP9(kawq6=MUQq?F4_l z3Z<*16M>SYBphk079*0sAldz%EzfTOevl`Oet~Z(2c*K{+Iy1eN1$Y7%wxIaDGrWF9=rW;o+8{Vv&>pEMKH@A<4iH(o$a1OWvy!FF*JE zeBbnqriQ~!(NR5ndBErv)J zpiOsuEJJ{45O{q>%%1MiAuA{KdTpp$HiuQ+V>_-l76>7BoOTDSU8SduPUg-&1K?EM z0B{$vl$NX8r6L}sDXiY*=%Z3j2VS|-)a(LDNK)(|3~LFKcJrI`$ z%i%sdT?*B48@qd0Rv5V!2sWAqel^BzwO5P7hlH6ou2=zHEeK&VFwo|;UYKyrG40Ba zkS+OA2d?BpDYP8|UI?tR#XLYlzL8WVS}@Yz5zu**{Sfn%h%RsTv|;q3W;B&OzNp5$>U z8aFQvQntw%u2Z(j8#w+?6X+M%;Wn{c2>7wG-UFk{OYR#v`g#6p*06EOwYD$-wF9hN zs~HL6NZ(O^Aaw&Rr6;PWnUu>P-EHA`QJiYH(L?yl>375NTl>G_-$A}PW;cZax$r## z$*BRuwPQmDF@C<*fKT^?Uww$jx!l%3M$r~n8uhggSL2inwWSl13 z2HG2cqmdZ$Xxk&{^R-cP=*GWN5x^VHJfFlaJ$ZRrrquQ>un!12fmw=x(H$uIfAntK zBKvV_$Q_J(`o(N;_b}=Targ@ZsJVPm_wI=WeT|2*NQuOkuqA5cPeM^(Y~k~=D+?=T zmgT6DmBb8v?sJFj6HdzO>FTB5`BWytN6l$}4Z`$-gpg_%;|0IaRNPsmi~Ac6+X_#Xg&pfwJm@?iqdMx}6K0%$m|;kQI$i0knG?h^A} z+dMtiZOs~-wshbXe6kuP+ygi|1Uu+sVn|uJw*-tA{!nB|AQS)ZUDhhFM7{q3#SZcG zT{C?L_|=_uM}n$51rQjk4<$FASk!x<<;kE$H1Y(U_m#`&jeCvOd#lj2ZeBp9u|%M% z_|CA5^27(Ut9e*3;EP$y=aXYsbr6|H z-&~8J)SBx#81qeC;!!HWJK>mDrDF z9_@8Q7l3~$GTWBBxYQ2rIg&rcl|-Q!BnG`uKo$Bmnmc_OF8{SD*W1G=}rgvDI+U8pBRU%ws`AhjrGd<+!A4+R2c62PgyVSj1S zYWu4shKAuxHE$2t=6FWi-u0oAHe@)A_0hoEH+QCis{&3A18`y`D2#mBOu}pONEVRs z3w#Qtlb}~*y8YcLl%BvBabZRq?F?o6bM(uAd?zhlF8u_1RliyX zot6fCmOpcA4xZ6HQE~PYYce)FGBR?}ReN)0Jr4EN41oeUoWm#B%lrV>p+(v#X^}OH zlqNAoT)f2&d#H2hm)_gb(Bq8jre;?B<{dFZh1Yf*rmCi+R~U0x=4}>$0?r8nX56Hn zDccurZqDZl)#I9j8$yoC?JqF+yd`bAA|<`F3dwde4Gv?9hIV_u)`PCe#Q{)@OEM)_ z{C~Oz=E{|-*UT|vx4Wj=0W%)+>@V0cdCK0duoIhM6On!_bs-m->oIUR>Mp(3z95TdLX~a=QHtkYPzg z$t`A#uJ`%hZW!nt!3IaBxuLzvsOEfg^*AcJMx+8X@mB1}+9~~K62*I#N1d9$ftg+6 zn#h5Xzmk`w2q%=*B49)LrrZZ?SeNIqF7^dkN^K=)WuxQs+q|yp-#(Y)s=N%GZQaA` zwddhCpnF;C2e&+GZZ>Ki(%s%^lRCeL%MLHf^@5*+lULv5W6$OA-FiyH_#rW$(F()W z@?v1k#>M6!28hs7K?Dnu-D?g1Hm?Zd7T8ar+D*BA5d;~a)z)4^KLRU-Lj1x~ID!Ju 
zltf)HwbDh#w_XDp@q_80tt02w!v9n~yLmJ*mpzSBauHNeE<9@wn1>|AVjs8bQvWpG zcT;7k>a^=L;*pid$#m%to91^4yHmfi!5Q2HIZBX)LwBqMyRQe#0TG|hJi?RIn+Gh~ zNZiAyq+Yl?;C{`x#a0dtd8wlp@IoB6J9RPRl#ac=EHD53srjPxA%j4D*;6S%MXRB5 zpe_^U@q)DmeSO#YlQG+KnlV`bztHiyny{z(^Cx~6orl}B`N(=x>6pw_z;3mQ8grLY z%mNFQ$Cyt?FlEyt`oIvAC1+*wM=J*T8^go#<(#QLcgz2UQk*vu<5^%z`@E*Tf#%DL zn~_Hluxn$I1Hg!7y?);X=vYtZA8n+`wR$4@1a#)HwDh!8!fN<^lnE4OZh@^qk+e^btvn57kU;|fjOK`RbK^d5fiFuEVr~gmOp<&fP+$1~_txGi z)v4PXGajDVRe2M(ZnF;{X2t$=XGQECGJk*dHR$9Z8?mak_hwfmNvEs*W^7!AJnwd@ z&Ee5_Qez|H0c+IinWw;eblP)cE!6AhwE5j_fb-aae^)8C2WzdUT)ezpD@A>57Qqm5 zg&b0#=NXnpd6^yG!^#HOrdqRyytcXoAP{eq2R$54vssQvM`8PAN;OGM$nHWokP)Vq zYa_e2PY>eM^{-Sbg)(u39+izdu?}D#%;U=wybHCS2P!$2)FAvaXJ%Z&7Lf&@qR^y* z`H(|U#Vr0?CTBfgI$eE62q#Xo-=w+bu51K7Mv3p{bMbc&_@)Lwcfp4cuVad_=~n%W zZFV9$Vr*NT{7vz2 z&6SoR>IP_-q(Sqsjziy6|97VGHLn+L3GWCrA@ybc1HfahO{|janWe{mbjlqmSNQov zS@aB|4yP9u0&^qUw;C5Yw_rJ6C)wnx&xWc?!Ml4 z{lC9^f^dgU#I|d@Mvo3-ArOMo(*e!fKYiDS^bSv>!}eL8N3^R2 ztNT9f+E#&8&w4^VB4I|0i+$tTUhFX=GiTTx%5?mv;V4qbkx-^4X=u7rdAX{>1z+Yl zgg)&j8;MZ{cui{CJlbr>jIIvz!*A4Q{)vUnv@$*eL(B4vnOm104md(ECtguJ>LQO9 zMD(v%h&w#yja(1yCV&g!XoB(p1@{(_bv8-cu)@qF$2Bdx5*uvTQPtUqtQetfpbq)n zs)=a`_};6;Oc<;TTi{fE%>DfK^*)gx%mF?Gd~9WUfo5*%4lQ4`)3VaPIR<*8H$Md zyUNGncc+!W~w zs><$xeZTXHbZ7I=IxP+Vk`_Vm5ZK>>B9uRN`kn**=4Bl_J~l5)5Q+C1lUU+%7AIB7 z$wp4}6J z^HcpT4O|8mBku<-GyKjLk@Bj5d9Xf?2l>0l|IjjaOFS_otf8GLwuyp7C7?=4Taw#N zUh|pjOOSy_Uqqm$PE=0dI`czq@|xAOI+Zhr((kG-A}#?XCggI)MWAo4ifQ1q4zeZt zG($Wku|b>MWdx$|cux>$GuCmIB*D!oJDHK6>FQ94Kt5V%=LqUYlN=DKx&wn%1MPr2 zKB!x;|5lap)s|z2w>oCBVT6H?0TtNE1k=(#nj9WA#Pk`Fcf_V= zku|djt_x2iFWsz2688pG{?#+@oy8Y6Xn)M+GNL!k_1q1+o39ruwF)WfihDC(6|6s~ zii;*iA-GqCzyGE8K9{ohf>uCn=>63TMxli_kx!Bs3%hKiv-=UW$DY%cQ z=lmvrXG47h_p(;^aJikAERrsPlmF7@;6B>j)7vv_+4c~T?$0lA1fZpA%w$9pFKBk4 zAxT GU>c|L(v|$n$t)|NINVX40_XCj#gN8wL`G?Mt%LG3;4%&dK)vJTzhg`(zVZ z;hCH?eXJcu6B}@u_|jM|fIb@uiSI}@QDTrv)wX~(*6D4)%Pm_Ui(NZ}d;r#`-awC4 ztDTNtv-(f2IvWQD_^SCWRbjq+Y7!Gz!l0;44#b?YyU#gxOf&2*skQBA)M^~JP_b!z 
z^<6H0XRzGS;DuP~v=UhCW5>4Yetv$vE8`gm@50lRa3E9K07_Kn$xFloYQmULuO$7g^;mnI`$C_fXk__3H5o zZ4May7m0YzW62 zevp-=D6M`S%xETXcowLxmNUEjx}!yL{^-Stj{M`BVMHi|5#nTD`hD(J<<>bE3nf!r zg+nG=l(`Ja-+fzb(g()`M`MRD)6;wf);iO7e!Ii!&xT|>LBl(@uS~k=(8Ce_Eu-fZg}za~bOst#8`(OFTSc*lw3Wp$2HY(o*N< z<9OGRb_0sL%t6+Yw!-5P!>VvWF{Q~Mvi ziucKJ#XP54Vdtr!NNs(~R%7f|2f*P%o)sul?V`J8PZRx!k?Wv9*w5AsOqr`VV`GX# zn$|Q|jp$$cIBFgfK!F^hZx$YX(}?+lB(%ew_v3s9R0z2S@iWv{XVlzVS18kJPMe%U zmetWcx1QlC+q8Yt?NUe{RPA1Ab#|Gk$PA+@pJ{DA|0kB}<0ybv3cQ6ILoRiBwqx8l z?CK2EuC)Q6B-DFvu}&RCzzE4ZmQu&vZCN~f!i9ziyh ztWY&Rp0f$OwRViwi!m^sVzIm&9yg%QlT67sO&lH2IL%P ze?->HvH7cqkRN}#?%MM0+ok=T{rESf?T?8WvFLulnFeNAW2Q&e`B%j69waFCwia`7 zyG5uN#Uyi|&{FUu)iGqs>RD%@+RA;SbRGdhE__}dkUN`Q$~bam;Yf8>+0;)my)|9& z_l?nwFX`g?0~?t;V-Fe)m^*P4pUoa*@4Q6WsZ|}6-n+CFs;Q6yu(wFcX`!&&y)Nb_ z!a&I=*DjZN`H)#HJz*zo0eYl1J=Ce!q>{+^SrYWxKc{yQQAt!t?WKfIkre>YTuPYu zCFRxU3aG%g*UQz>YNM^(N|&*Lw59!`V!>iK`r9obg1CA|yzu7$aRYKmRJTR>q@SBUd3h{>0+Q+$&pscl!{+xO_03$5r?? zC5xG@I)DuBN8^M052Qp*RA46%v?LCWJ=pja{$H2?TGgmr)+lUww zszc;DaC@*1(iZ3T%1uHrK!k)E<8|CS*bw+Z6rC*Ej`LSonyXzkDssh)i)*VJXqyc` zr|9)!V-AxtR~vtbCCIhxGp0L5XZ4&)8DASG26?t^0m)43Ju`oauM~*~uI^fYB;Zp| zs4qmgdaAeI4Qq6Jcv8BIMnc=4J-c&f1UZ#0Sk-qJD4^!}t;M0=(@v0I;!V|BFJcbh z=ZBBm{kdOq>tco>f=^{6PD?8f-}g2QhMY3jbGs7fe)-0t(_*MlyL1!MJm0&ZwChIk zlfm%jNXIr|+l{97+2;@sy?R*6R?N1)fJ_M?r8Z2v#VDLRyuD)|!PGqS+B#RwK!Ai# z*+j=wT;7+2hJW%*D9fJ)3dwSF^>?6RA^?9LJM!shWi?O6E_+eIQRGA`wUGKp@MNex7`d>r1dgEto*W;XzxWC6Nv(9P@6YiYL8yJTI!G z^zM2J%K;lIU`1((r%X7ghQAzR+Gh|C zr?w48JC9ZJ3T#$mf#3zc-)Clw!~}mcZX7Iykh#s4yvxb#Hcoz`yJnaWX-mUG;e^Y? zV&qJ>NcYM7FMa+*^zFD{b7XCPHfz$;7Z~0Ge5Xn`yZh7EW|>obaq{tj&oTDu)n0EL z#XOmkQz1{L#X!k+SReHX=cb1(5?o>}2a(`GIX8yoo|dAy2uKq37fm(dR|LTXTU#$! 
zKxyo%sr$gAr~CW@`w-Q~JprIt4Ut(tY^kp?Wxz+=v6uTz8vODL#DR$YC_r6J-O=3k z!cX?}-%RGii`!DI zmy6V0y{GZ{4eb0X`#b~KB@QOr=T3?qimR}5<4BF?$KY_eM&c7r6&rE?4C-n8b-rH+IfbUf45?QY1s|_=H;< z_Mo6>`#kBrl9GCdjA!g`BW7qi2N*Eum&Wf0a`|vF0pGiK@Fzx%MUz1H^B~+Fo>jQp zBHS!aOv*rNVE47F4w?9z>CrOXJhKGmy1sg3QmnZ4#GeT(9W4SY)# zyuB7$nuXF+6T77teFO9ToG0Ft%^%)3Ul;5e-D=~0Jn9D}E6yXqE==cjXy2SjJ`Z zC77n_(ff@lO&>aDxFm_vc7{r??^dXh>?15xT-|0(ITv7hFYAX(U?+#xvey1pKlnFN zDTkULOXC=$;lRi}ZDfI+zhNXv*iu!$ViULbJ(G&OG;J4e z*SJ!CUb=llLa7z^T0RkKIy^hJC*4 zzq$}Yc#Ae1^OxNpzK9p z@ys`+IL&zw0DRSSeJs>Ih?~i3P0){?4q8Sx)wju-zBAHXubYhgs>)x-aPlXv%%qcp zr-MQ+WdC9dC2#rMYRe&t`WuZ@ZS6^4-iFkODMPgNb>zO0=gT6&YgzVN2lhzZHW<~{ zS)?~0kbPbooR)OVnpN$8`S5z`f1aB-lw&tcK{Cz*AX~Bts43hmdNJ5u(PSv_Hr7ox zg?~QEIS(66w3+V6#`?S`GJki)FO@6bd8c0bORj@GTIm|#eerFvjM&d;D|{znA)271 z^n1OJhU3j!hU|tEP4$k8LMLf9ghK0He_VKAOccpJ(K7{mDl+)!mhge$Fc!K4*{x<8 z%zl(bPbY1H_J#WasRy+}AcAS&ao2o!^OcmgUP#qEvMkdTS4GLQk$qAZQeQdkBoL5# zDHU;hjZK}f>GAbv1^3#m&Nn$Ob{5P|&rbNif8*VdYF+$#*M}$ZJ(z4W_Dgc7K_TjZ z2$*(#b%o5eKz9G9|%3T!__kU(9)`9((h}8E~@I|8Av1kWF~?)=fh@5 z_KX9q_94VH<<=DbE9$2@=Q_*(p;UYAxaHBQnFMq%@+3j((trd%+GmZ|`4bQ|P$xPy z*jGEK7LMyKY`OPwpH`eU)gu=de=XUxWr-=)b2QaCITJ2+9kUaereM^HkUK_c*U(Nj zJR)kYJpUJ$P&XUovNQ$L{t5_E3oc-Or6c+fYZ_b;PS`QO5jYu4NG%t~8NHW`Kb=n) zZ6r^%m%UX3J1z$3%0=_6O=j0#&+w@D+V zp|OQ6{e6I&1vJuFb#7>A%RJB7FV{V*Ax@%67pNO}7JrC8HDUaB^6--@pa>ov0J8{v zz{_&V3+)CuUg2jelE2KcOd^xJ=5K!}O76Zoa2k6uGYW76Rt`{&M858~Sd*^mn5s2E zrLas+*}8f;eVayVvY&xxv?$!)cVxw(=Wh(mYx*dUifc#!20D92Lw;Y{g~Y=XEbtIu zLFd>c-xQ+8sO1}&c2OAT#>7Xaz zy5t^mdhSx*JAr)Hk?M+=G7mUE$RS;#32ZQfcn#affHTu@B)c*^ypZQG8N z#3gU-LLIlTu&9HLI<0PR%SlFXbsFe^645yUs@U1%D_{+7OABjQfozgOf1Y?rFed>M z>gQyrK1gH=wC@A76pH=P4^v%0YKJqlvf*y$=gR8Ba&ADt`-O3WVLYTQDX_Ik)HO|r zA}q@DRsWSrj1s{p&{dRDJTgymVCEtuSkgVWqKJWAf{kBT{a0#xfBbLa07ONtivl~4SXef; zZ@rk5u71}0Q#Uvtn4TBnME&>M9kpeFLT#z2tB+=q?-P1E;km+(0F)b`D7|!>biD%3 zI^=1!2;5~Jn@LvqhjQAR3Ya_~Fj)8WRDJ$de4+#eixAv$I)GfqLOJV&h2tXsJjWi# 
zY!|{NuMG9=2JJ@q=`qQfp&ELdl3MafOg|8W$*kJnyJW12anTaZC>V{p#(&kGD|s9lF0tf$%IKt5{ORQB57%hT;G*tTV889oRz>2kEum zBc>OLQC}={ER^DmOC)hoy8Xx79Fr*eX0vykRymt$_EyMtbnF%Yp{oIZ0V@Z@GK+H( zSL^pVIJaInE*Szh<9Mr=Rw5ofB9#O&c5R)yweV>%g{J&fy+BHoz;@V;`0b~?8Jvu@ zMI*a`4U+ZUaDrmEhJlZX+r@7isTBvF_BwUX8yIHDE3vS7d+W?=ox1!GAu*m{g(TFf zApNgaA;Exo*W9k>mu8xfCQJKSZSB&24#W2}Qb=;nc?ux29kuu7c6+~aZFPP{Y=b)f z5X~Y~`1XSDNJ9jE-naA#?+~jq5gGEY)Vyw<{WdtcI=Ii!lKgm=lrUfYJQv#s5@_Z0 z5#%I25-m1i^(%|kB`n_TlkGU`&cGaqJZXkZnZLO2E?|W{R0$ll;GlN20FyQLQoJG& z*rtC`Ky)Q??(AdY7c8L4q6z^ZW+r~HlwEPPV(PQV6kc^6-$J$*vrqRy-t)g^$A2*fq-(ZMnxDa*3NTxosPWRJb$1*w41;{$>7=eP)=YI!L0-Rf6!Q+; z2%nLM?@sj&|5KAb$)N)jH$Ijq-5U>9SegWbHe{W?hBxXfqz*^e156mpMQ5wm^D1_g zdXBF(_oX&;)}@Eet{u3*@03P(<=$<-m_}(QF&-G?1HV0BKJwE4SIQ7ul>me~^}5A- z>I3g^;3+M$L49{Sw^}cMC~HX0jJHk(6W*&jd^MUsgu7I0}1gFrc!b^ThiyWw;$Tmuvq|ha3mJ~uM~+%+(1cW^`oOdA_)-CQ_Nar zd>Gq#Y9?wMCOR%r9_IzKP}9U_9uqd~2?|fr?7s}lwPnMqyzaNF zt2?(7_qSw`R+>Gpc^DH;b2PFE{yq5=^E2IVOW(;j3iaJl+g6`SqqqG0Ulea9 z@Rh10LX`@B*Vb1;0Bb(C$F3I47eISmk#BdVlzY6Ea>i&DC@nk$*}m94@OuY}Px`!p z^F=-9T)~);zcfwykTtK-IJ`~z^w#ko4M82!PQ=LP=HRA#!sjK$fxiIE-)vsI@gJJ$ z;&~#*iy@3)qa}MP*g;fiH%2MmWTI0w-gNZAzf#Kgd@hX}?>laB9ai|KWYhuEK~~S_ zJc9koci`UaS;&w65`Sqv()4z}p@^i_2fr$BL13-RKaKGWdeqbIco5N?5T?NUrqyeH zRn5xw_8&v{C^vzA*3h@}3|{$_U$f=uAVxN;x%dg7F}b0>>7CL3+4d9HwnP|AJ^caK z0_uYTcC)+CpEQ`b70N1yYv?KilSVy4NQ) z`O)9awdciVU7!Pm;z<=oy8i`%iTWhav`6GhVo-C_*hvQTNBjjWl2~|w=oNw=`u@i_ z7Uqe3NGr9?2--wZ{n26ZZWP&~rKM4@V3V&zZDx?M9PA=NK0?@T-E-S8_IB87-wV#K zfR}@&d_r(5V6c%AM|ZS&N zq4S!{GCSnpf=K>Ic({Rk{r#gBrkey3hgvplHA2NJ%}G3f)RE8S2Xf%bD$T-8>K4E^ ztG|bzd%7Mp6A7*0ZpC0`n%(8(^}23;y~C>M{{BAsMtV5?WL0tIdPsKFhlb~LgIJ=WhQCmGyThC2)<(i8SHC@&0WIc;Zv(EEv6E99vV)8s);?<8QjxPHp}x zb@KlWnZ&YkhyVE_o%0QIE@b}A{_iJRUz58U`1cC(|491wc&6L`e|>bP6Otr_b>Eb` z6iLowce%?ghjntuVaq8YmV`NMtArd^3FWXQvC3h|`M7dgh*iU!XBKPbu(8eTbpPJ> z_xGPaJZ2B?>wR6X>v_ZvyPwXT8Ev$;Seag#&YyMK&R%hpnaGY$Tu_3uXlru3yb0hx zy)d(GHQ{OncD5J_6_jt5<%)-VTaVOeMx1Pwu8MQj|C3>QnNO1_8}DB{cA%Cp0qi;F 
z$N!i2-Mlj_`-uu~%wI$~N=}7_A$U8z9Rp<}D+Q4Vc2VZNRW?UpDgPhf7kEuqc_s3Y zM9GIccGadFLfJq?J#&&O^O-IGDx6vlkQ+b-Jhc{WVu&l0SrqT1D5vC0svKrV;VT`^ zM*dR9=mGu!I4Gr7WFX&ph>6&XD(s`8kBaO&bx-{mN&q&ue-#22f#Gg42^dceejzVj zz1{%mrzlGwErUp7&jYK1%Lk_k;W6!$uXo>)73Zo$esdo%pPj1R$M?9~zkaQnUVg+& z@=FX#2jrca4B7EA`V{1>EOd(0Bf?xHM9CZFTYHD&r7Dz zscVad+W;|Is-U`}8<<%H8{%~_wk#^`KTRaD>hnOMvdbn0KEDPzK6Fe19({Mx^Sz)3 zvFC*VK=JVuso-tJkpt690S+_y247G9#92zePl9BaYKVB`uIfRJ^gct@kO7W`N3ESf%@8x2WFgAS{C$JM7p~GZc1_1!=3;bf5E6=4t-+8Kabh>c|(rPN&MWdDbzBAB6OT1=PUov}dPfz*-73w2(L)mX}1sR~d9 zef86g4;?f0=mPx~h_LEZ6EGySBZ4*|;j;S>e`So2$QuZ=UY)o{Ov3FQzO9;>_!pzZ zx@_`+wJ5-WT^#Z)6E%BQ|4tN~G(ncC=Ln&{DUcy@>5BLH3IM1#I=yoNk;a}w7 zpoDFlKB0baK zri1%+h23*?$lIr>yONRj8t+0{zWX%4q?C-amL)EElsCF_fNbjn14(@`RZzP0O+`9P?BM{Oo*{f_ip^5BeX~DK4Cqle(b$_o)WuB;KKfVs^LCIk25zNPh z7jCINM~Q^dfb_0a`B00zG?`*BH za#9UbKrHaQ_`F8*-ENoTgELNh1v4ufjSU>If=JF_Q;Lxwm@2 zTb3>SF$EywMlHGRZ|mA>`IG=UEn8&P8dMT;5SAajyXAZ))q7$WqE2<$Ro=#uu5+&6 z4aK?j`ZGx-G0fZB z+veW)iPd)tvr)$_4~|s^9qj1Rg3IzXfOeNF)$jbu2kr!?Ft66E7NQVtwG^~z)g8b% zX67zm__GS#7nU%mjon1r9Rpg0gqyDQjYNODwn+FnDOmObt(SOfWVsHX5+BXV5>#XW zLDC^&v+Canw(I{Y9L^?eCU|^1xFOw}B;UEE6i}GOO2F5FL)UAPb;%brr<%Mn%GV_U z+a6otp^D}2kM$p3uwo`AbXuX}Eq7D1k->lVe4{PKt)QO%w0!3!6gM0?(>>n=Gzogd zd<;1Wd*+f@ozT7MYM(9Cl4Ej#XgpB@keMc^iN#GZ*-|l|c$w?5YyhA*yXfOHzvzd+0M-Kn zf39R3#@+a9QPm)J9%!HXquEiei0r*3CCG4cy%3=IqZ}5JO7aOHf_#dy!@ky2QmpC8 zMkPS9wlsvX>qOw$IgoNFJ``gACYn;C+eRC3>lfKTl6@>{`gzr{8J{a5syD03t4ZVB z=QYT}>6pywjw4ZQ*5j`xeD{OZnyFD(gnT{4C&%%mPRB<;yA&L*v9y{18COlEb1xAe ze}heWg>~pq+fbpRZJ3?sMjMgLw^lL*@fLV2GC|lW3P+#Jt|=GcyNoTjVH(+lrMy%k z?2eM_7WNZW#QZvABTJB0&#^tzgDMSVN-i?ZF@el#B%i4?S%!*K>V_W>`ELHv0Jd@nMp zH~wBcK3S0Af^cmI*lCRNQPm;NOi%P&zcVJDWx9X>+my@ z&0d6LAW4tz=pKwT^TC3<{CUHXO+H4!dH&R{N|^f28N=ONQQK+Y8rnT~?ZaqGNchp5 z;5G7NuwLeD_2KI=>ic_}~ZG2|)hOH$>9J8T3h)t1c~d8GRBf$SuJa z_7~>Z>;}ajTu7$xd{Z;bQtP=N{0Hq-FQ(54_~O_iJ`mCiURddoZCt$7pGt z3r*hs+_B1|20KTVYTVm5=)To;5J+E}QyCOY=E?)S$7p~AHs9>|he5cS#C@DX5 z#I>OCyOVEpOEAZ+=1c{H4d7$=KD)dTU-Y&X=p*vlD!}YvFVgE|N)PoG?u 
zA*=N62?vVa**1R?Qsyu!f&p*e-ElX#n07i(8-pt?7(TuE3PdAqF0>U42QI=A;EjTJ zbN+WII3U)&D~Plnt!6S?34+?w4v-_W$HX3H@23|oA4TdZHrlalo7tSH`-W(WbK&I{ zi9Vd8z1Kopy%)*+mKh&2*Ad4hWt-{1=j_^NiFcUp_m3KD zrn-wMBgTS2ZvO;bt&|hok-9*V?#p+222~fq-8~F5SWeH!84?zfTS=X zKGtr0qZb03Ayv@pDc#|_dUS2^o$1^unCRB@U| zq^FUO40#ezYu)7YzJ7Pj91tYoerS>I)j0{ zRLMWc(k>y>6);*jeJutMb_%dq>L?TTU1EpSHq%*2?=~&c$2yMx_C;4*8;`H2Em$Bu z2aQo@w_zE^6W)x7vN__J5FzXgRdDixUsUgz%-jQHSEXT<7Yn3P_~g~Op2oB`r~Snx zi?4kTUHl9DWjhl`coKLDP3?`-bvV$#`DX*POK7&1RRWI$&pJjIBECyo}xe& zz8c*GD3B)FNq{AXvEZ63@kJ)7s;TmGpW)d+NR>mAUCdP$#pMD z`q?6NHKtA~5AF>(UBK&cEA^K9P3KD$snT~5U+-qWNdWpFPi>zCaHGaXTU+2`vd+Ikt**0u%L%W~qJqxkm2ME; z1GtT)73T|3+ll*ic{+q<`Z;O}2iWC`Vg=w?kE6QvVAoyh2S#Men%s85C}+SIbTvsS zjgUDIi5_|_97ZIX2{vQdKkwrA^PQ@^yDUcCL%nOgRq{so@SZ! zEQ)5?>wx*uSirQy6Gc({FB-U-T`#wWPp+|~$RQha%1*m3x$kmd>!vJA1g>p7c};3PZ(7B+9fzq46x+o^%dI@BK9>I+T&`pYjWv#&D? z>gRM*@`f}!wnD6b`}2uL@7EjI)A3PM2-#(M>HvnNa^|blz`q$0<1G zgO5I)$5YY)KK0%ZO2*-nwo9RNs=rxw0n~!Yb&|ilqF_F8&@U?ST_4z0bzcK}Fr~YJ z!-%|SSp;cUA7Lc2)IV+Zs-GVCR{;Y+(I2W;l`}d~m8JqOLc3tR4pVGiQWDazcm3wc zl`3(ZHVg3U_~6hliEN2{djjAM)M_JTQ-yXSavVzd#i$yfoLoh&`!epV@Bc3aGYB;fbefwTaR69jM3hQ#7l%tq`bujxfQBT;a~r(N7g8S$OSJ4gQX zUxiD$`<^LS9rjpfluErJDv6gv;`V8FsPix>^)7aE!@S_P*B1|3QFP^94Uta)tUFxf zfE9VLcYNA2h;*&28LJmW+&1j~R{>y0cHJC}?egbl$cT$=w2W?&(1)^fH8s|~3%%a` z0=A9k{V4SNz^RJyfT~g_<$^%1YLQbF?$|A!c}ovKwC$|#IdKgW7s1-=^A8XIBr`Z7 zjyylvBsluPM^+{@9tOq&DrcrnaA?nyxTpP4Fdhx%|BU$padlYsr)}V3S5d&c{3~7- zHABk&aL?^xY-yF-Yc^YkehhTC+!fpXz}hOk{~odQV{y3w2O|WgDZdZ4$yU|la>xkS zrse9jM2eB`eX0C~J`h0;8mguFel(d+qffCQ`IbUkrY{=XC;*Z94uFk zzv5v$LbE&0Kd~xE^GyU?`N{GR>HHrWJd9&8p>2JQmJ+N@Eu!Ek`<@G59K@ zgQJzQiv$$5*u!>r!~3j@jl$sy+k?3y1>@e~iL`>%p45RLSGQ1%ZWeNe;}#Ap2AJXd z3E8@dxKqXP7;Seo%-mu;E1&^o>oW9Q#?-N_4HKA8FYddCs) z7WOw@uOl&%M*>cE20%9wquItklf$dNAc)Q?ZGjkTskx|;+1wU2=iLr-xf#azb2FYt zSLCl2$P7PYR85M`;zz#nJW9GmnrRtbvdfd|>7mQ0py?1$To|gr9qg=aqNNOmg;}R3 z?*vZrc`NVEhM8mfHI@xCKxG1p%mCSL6to;- zgv%8YKb^GinK@RU<1>15nlKp{(#(#D1ERn4uVy}=6B`WIOM#Fm2}MJc;Tpx-ZfvC7 
zrq!#UjoV~X1jGr5iU>@-AJhMpnphoa^56`TnJRLu`X=0T) zJp5iq!LPcmJ@0<>obWJ>?W{cBSz)<4h%zDu0bUoh-IljfZEo`F;|s0tC%p$Eh4?xb zHQ0_{nCgVsS%J{orpl<3w&ZTkJHPMnjlYy2_y1F+*k*0~U^L|%X^)zGwG6NnvU6`* z(tAfrYVY5?p1{|AByTz}o88nC|1u>meXumSG#hBSC3!#>MPZ~0Kbl+knT7#a{uind+>#7W3xUs0A!4r75E*( z2zCJhBQ+37gsfnS2N|O?P;X|8hdcB`d6-wfeqw^_q)8}FrXoeREw}`D#7pu&?LmX{ z_pSejWOm$*=k;xXE^cae%J;T$^B%qsnbxv!tqu_QsZ1=R<7z4Gmj6boqH&*R^3gM4` zNP3N!#8Y}4<@yJCSSSM9a$nxm+>MtQU4PWk00fsEBcylJJ<3|(tB6$io)ghFC(oU{ za9A(t=#Kf@drsM|LArowf4+pMJQm914E#y&d#HXtj!kwo!NQWJkjgKp;p$8AiT8W5 zZvbv3&U&`>Y%KXA0d;bId1};rFIwTUGFhJDTM=2iJyaQyo>R`hfY#hU{r4_- zM2v;2BMRMOg9)!uq~|q0$C7#iY6KsmYdp1&T3I}Xo)>uQujI|dtk>F2X7rlJMsC(P zu|IG0vE`Vus156{R|Mqxyv!Mepw7sO`8>K`68lZCEoLF*{uY4>I$=?<^@QQ!rYjK9-hTX zZv>QYgX~Q0S=|@E?adMP&rJ0tKpRiWja$g6gn4zGbO5cbS))NLwjhR>@D50MXu=2- zfi*M?BwbK|=l}7&y{Vh*{k8o^{C#Iy%rJ!`C(l5?Z00_5%MQO-@&4_l(sa*a;rIky zf3#h7Z_DGs1@be9`X?rvy)3%fTv2Q`sS-G}U5Wn8uGsG7DjUXokNn&7`pY1RG=_s~ z;{39S<`3Rru;z(Yx3;DJ!4XEI)C+9G0WIY=3>{|4E#U z(|_ELVbZbAauTpu-O$bX3(p4b48I@3e~1TP^qTI-7JffMEL@J-029G^ED*C9#BFzbWF(|e5yWS}R({n6j5eEDDEb-ti-(qh0QJAX99uY!Xl zgd?geD%SZ_Ki2pH^riKs9-#U&%~)7u+I{3rYgz0~BCY(_)HksAJNA{Me#Rd(}37bkd_G5_J`y-$9y6KulF~A+eUPe9v#GH{*w=FtCN5YEVRH|GH0Vd zS&?ZCCccDT2|W$}G5-@JourYtE1Y3N+KWVV#)`|IC;GjWC7pNq>&C(j8}(0ppDnMB zeq?~tJ5W7Z`gg7aG5G^ER&v$jgCg6~M4N7h#x?AeGI`~?K- zfgLNl#@i7~+IaOxK5lUJ)K0%sL|GxnvhwPGHRHi2&Ur@1&|nq8G3~*^K7aw+gYJSW z37~28{m}ylj;8;Rio&`cvN4Tdr?+n@!iXo^_4IL$ORrQ$HUL65cp)Y78t9U6ijX_3eV_C)ue6ZX9RhYQ*a{oc&#kOYW z@RYdnkaS>_toL7c^mDIbe0)30eOkMT(`H;mgIud2r>c@aEnL$atp#+7n#tk@&Er%V zZI|Fp4aCd$hj_+tFHpi~t~FgRIt<>$L_D@Tp6bjMroIo0&t2b}*An!u^~o~@%bg}U z*#sT&I+hB02;n-=ClY!yg5HTrKNZN6O6g_OgZPOvUqdzT2gb8>$A=s_aJHtz28cqz z07{V3e?Byb5M1w1!uSq+NXYF(56mleEbeg2B};=VlO(%0r-ZW}6Q@KNhx-QO`~i(U zfYhZ~ataVrQ+#w^8~~%xt!Ztsb4Bh-%1Mk9#2vYpkwK{dJL7W&<<-gH!`(UZAFHq1 zpRQfc&-6LsU`}S9o&gP-wv6zuAa7I5dMZ!ux?OZ9j1x{kY^KLLR+-hvqu|Pr*~j!c zQo6Cx$;98esXjHSOD&NN!-7da!aC(@CE9A0M$`~Ll|V3`n=opG`r3S^W|q{3F$Uu- zTX;p}7N1*HssUqP8Q3)um-^2h?9_5bt$(r-90}U9y 
z#~PzCrlRLI)crh0NOhowUf_YD!k|^g8*uT??zs<>;qt-Z&E3HnS~J#ex{6zp`d>@n z2gGz4`|hNCzC(uXK|K5~J0COF%1BXdlwmtm=v~bR7Q+AyDrvi1 zTWa*9ZzgTP2LMxF{0#l&S@mUniWtZ*}+~5}2Lk4~pA)fPO+2y3!q*%z@W>qW{jTI*R3tmw zs^<<(?dQ{PYmj^by0}JWyKPP*XMS}O*A^_5p>qePLmsJ413fYksm%~3xe9Xt+0(gM z9-dy7`FYls^x?m{?{~C3|6@O}dNIo=6}SJ(_5#E2pg~`*(e`(Eh^hGbO(ubT>9?V{ zUy)S6Vg~QuOxf13t?}s5{126=t-%MME~L8Ar2RQUK!S2}>f+T00-ykHy~^-KtK5pr zB8%`>JuN^I6M6k+>Yi)4$4215Y$*;dOU?}S7V|eTUC)CG_eOr<;JNa|0Hx7V$rYdm znTzR6Cqxb732ZEn_?)nP_*n)N)=2-E@U+xr0)zqN6I)Z};OoHm8MYMjC|-ll5b{RQ z03VF+ENN~w0p?XJIh-*1Q92L&^xqEp_IqMFAdXJ3Oa!%VMjM?O*MYo}#zB4*3IS3u zY+b|a>iz8-@?4kf@3{JrgOMSIh)F1CA>O292TTUe$QMuN&zGCBHEFk zZ7*0^$*&Vx>vEOfM0yZ1pgk7Qd$Vj@Qr6)jv$=4H@5u-VRq25sN%pid#%i&Bi=}~< zmWDWlN1r=G6DO0C?=?fmZ!Cy?*6&!~>$Mh;?b;3Nek9s5u-W)pVg`C2@)-!yg*M}M zw9hKvrPqdt+ta8=x5loV6)qWy?LQ~d56$8aAp@9O*>l&{jLg26f}?-aJlb^k3UT-3 z?pIUVWiNwH_kPQa%bj@S?S=Qwnk|ZecD_*gy&~Rh-3O-S9h*F0V(AgKl0TYPE!*q8 zD!m10muWGlaaOq=Gn2Xg zOMpkdn?vlqxv9-4g-4X-%THcy7G9#on zo+RO)uds%30O{~Ha+s5yKlu6+AXEuQO z7`1raH}p)DLxCUqU9z|WfJ=srmXFj|g?ankdYvv?l5!!GO=-mku0<+=7n=cW&;y>C zwVZ=xDMcS{%xBy<1-Yc+V8t435=)GQTfT)So|nK9m4B?1tTPJ99iZQ=Hz@G8G83Qi zWJJXc6C^MS8l7q?a=-yEYjNxCn0cgQK&|t(oghFLp_$w%htLF#;zRO*ne8>QVobnl z3Q4u{a7w?R&4e%&T9{yN`UMQiO{H2^yUP()Z}MEb*B2Ey-%t=|8UZs=$j44|Nj5Lu!}p4FBuzf9$NJpLf&FtyY;#Jx^X1=kQ%x(e6fD9S2D_2bUC{T?3A zax45zjIJb@=RKuK%{OEIj6(y8L!jX74S{r-FE3ABw8IaN*5q5{IIz*w0UiKkoKFAq z{h6KFu5>K_-Ek`$G~~y9{rCLKKC}BSU)t>8YyzsI#Zp~_etKqH+q0t;8V-XSa0;-) zUUB#nxWjHCuSqeTX%6{=r=G_pQjPvEmXSC)2o#0Qi?N7xxmEq?h9zF6hiOJuL*rAj zJHD035CNlEy@%ea2;(*Mf4ah3WIE7Z{WZ?4bbx@D{_kRq+hR}D%VGAqY{&l>5PF}2 z^dB0-+H*YnzR57JDx!zta+zPB31que<#j+F5~S>6u_{fxGu@KZh3$GEhqzjgd;uU9 z&N}cNFMpDT1{uN4hpb#prg87M7`X#r5y8+#p=d_?#1s+bj0xN85`t7HfqMH zXR@rxjJ$Vm6?@7{DxIT?^<2bwQIUm}rLCd(50P=`5uW^sEHSyGxsR#H<8Q@H$FZ-G zZmEvrU1fkoD400GVfYG*5vaGw+Af#yS;PAk!d9q`$Xjk;r#8%A;w!p z`0i!|Zl=H=-@|_p(4Be!Yp zxVKCgxgwr)tN`ppZ)V7wBsVKG@Sto5qYthmRf;^kN2F%Nai5U*)l`9t!?{uZWW{az 
zH~=+T+Y!seKUGwqp$i=00sVY19MLSt@*rVVSIqQQv^!cgSB?VamMPrJW&E1uoBvdr z9wnShPq*!`9I^sYAw*Es9W#Dh5LAdFR^uaqzq#kC5ZV9tyZ@d_l?`NyjOPGuM5zgV zrdIv`q@P}q|5xq{Tm^Uf(gmA1-ves{g=ji$5l~E4fuO%aFrxrf7y!Z(N$`He$IW&K zd5yoi>TyN_F#?es)vDkU)&C|y7gYc^=D@GryP9<|_t;s?zoA|fDD$D;Ql7VnS4G9T6Oi^IHd-{)Tu8CIEXRKVW{6rJVtTY^ zUAv_dp01+AA|n#zwkE!bDSuvp&=A z$%qvugDcOyYn4dc3;+`OIM?=~g-{+#w^#mMLsUrQ(1PcbyN}z^8S_5Mpe}{` zrsellI<_n7KZ{C&Cu3?YmI8SBOyYyYSYGO9v2*N%Qdi-?E_;H{84`k*@;Wg`mR~*_ zAUov5RKhYAFW73%60+^|6HoElRVue4SjPifugPUuN03W8cMlOtLNQ zq#o;}AF9TLIrwiphtxz!&jFtb%TH}Y^#V-Y^m&r7(#Wk4upRXKPW0zTSOLv=B57=S^T`R*( zr4}g#Z}$FkKtWM7`_Qf^3P^p4Sm|}RorycOCtv{}D6C`U1Pvy=R&`ki;52u2j&k9Z zb_YedySJFpcT=KL6UOvF6SW}(1wet6^RTs*X=zC;`K{0a>3l!2nQ5hP?+ah)!E)w3 zA`fEOx9+MwWHP=#G8wn)bU*91>p`#H%cRz{PJa)Ho#C#uSaHX3<##8a#e+so2`>U) zw2Y&J9(4zazMi@2dE5D3H={OJS>^RES2KS=FNt$exr&1Koc+Ia__ z7msS3Hz34J=Z*v4NV#J>2d&+VU4|X1{fGrq&6kc%}94cRk`+i$L6 zz7y6bITS03jwG0Lo3vc2kGmG?F(#zw0l_a{i6>l!b(Y6+mT5{lsnB<}oXsw~xf83l z-CY`4Z-2Re|5u^b-^4=D+C6O;6Vp-6T60iiHM7+H^9;_se>)0nR7-6Gtm-2q&dKO7Ign0lfq$?tR z9?dV!7HoOl?%~n9E-VY&}~oMIJm;=ixSlBtF=br8G`HAPlJLv>kijS|yaRz;KRX%~!n=IS6=Yfzu_FnA%0G0+H_Ee_TD;J%>A#!3B-MVV*vQ zNT3Xmz_DmB&_tPBhiO%q=Z`)|#2WhI2R1t4S#(X(O;VayEYL%4XVk{^$f!ez#2+=` zx4(hU5?IB?9zV97w#{|`-}p%r5F`gNpM5)2(W4W}gyc4iUl2Edsr~oaXhyroH3=-0 zX+zh)x$()mOO?Lk6f<%BtOD!VcDq>` zpOB@ifJ|U+sLa2}ocF^t-10lsl&!&>N@he_eqoq+^`Igk*E=I!?bF2*-QW|vlf5gR z&M?h=?X|L9=}5&SFtqB?V9oNS-s^2fs9gUXpux4Ho(5<*ALUD&IvJ5Cj{-@q?mY7z z{0y<1qRNID)(qDTdb_AkPJfm^=}si*ZOx4OV#S%teqqM-xZehf=F;Y(r#2${@5M#f zQ4mpNF)TrMHWCk0iiE#}*FdEECVS=D+?iYkDJ#zlpDsV>%{uo{^fdOKv01w|yUnk4 zQS1p$*o}+#SjH#UpH!Mteq(1&HL9$Ks)p_o7OP*TE=W8(Y7C!|Gw)*5vd4()v39dY z)?AfxRHu)l182&rfKd~7Q}axx`Px_aBP8GoRT~Nr%KFrvpce!|rIChYnq~*j;~A%F zxrgK|z?Y}Ya5v;tDLsryq_cPTo19LtYm*Nwqi`p!^Z9F@XK-?NM`uDvAo|*k01`ls zlpl9F80XT&BDf~uT`}r%4T5AGiiW7lNdPM`@%q`s?ad-4aaaYQpumyoQtP~cw1+>l zk6*Jju`Cux-x7)Z?Ybw{S~x%GBmJ8DKMbl;H2bI#N(3~4`c}}J@X_oW-+Z7Q}+6@Tk=TlL1r%Ph$tXUUH@ms1uYC(r=M4?#by%MQxU#L#(8M9Gkz 
zbh^XCET`@EqOEyt80HLBB4-ZOWAFc~u&wPHFyU?aW&krUsq;RYl*!adqhPLP`jd1p zvw(wcH(FOt@ru0<4x7bq>mjfwQc6*TQ0wsa(RVU@T+H1afGWd^wZ=4)e5t@-_#T?iJZ}=&=lPhwLg~6Q_{e;VgD-ZIGXK9wvOyAq0*eT#xJ0) z{YkgDPTgObdc0ka-m+dUggs`2ZkFxV4SUpm6Pf$S(`V30DbZ;oH_uQ#IVXaBy>2uM zg;0z{N9INDcT4IH_Uy?C!0DYp-i8@#X7qFEK3!lVb@lX4u*V2*7qFz=8ybUaHq9g& zfW(`aEVoLu!25oKf1U`J0ho&Dg)fTu)w(0U_z(Wx_s9@K+?{lr`1Mu&)|0--RFM)u7d%d=hau{d|C9)~<9nBIK zpTroha()+G-3gzDQ=ryK61s-N_8+>23CR?|tQ2w3!%wj@9eL=Ta|XMWbOT@9cC}b^ zk{H?ny+@ve-G_Q)!6ygZ4fn|FxLI>JP0h6MHlrhD`X9#PMY()klb-{S{trdINpF6+ z_up;IyIHYVu}g6Kba&g89+caE5jlpM@{x2Z$MUwjx+Y3XGS5}=PU-wLIbcM!+|L4d z1F37gEQ~7u!`{Cok+yf!iaiN18tsM@EV^#y#W=eSgj!Kir2!#7F`{uzn;dFDg)!#7c?=cKQBb!&A|MJKy=Ol zQ|gCG4`~l*Aus$|P4$e>nZ;iPGeW8>v4q!{cBUy=O2d#Nqw?&e zv-g(SfB?bS^5p#-eI^*=6Ny9WpbUq7#_1D1CoAnYMhOj;l!&_+XV53w4&trvQ50F=r8pT78r!QPvLK&ioTR30*-$-l&SmwDY_yti@+y7 zkZ(Ij;?@KtI z!lTPldM2g>zAL30ae&djLZkKl5>LOOyyV#%_gQ+F6E1KTqd&ZGG?xR1DbPN9Gxu5i zQM$*ZZ`{#QKgVZI7dC6xGMTz~`3DEx5uWDqRi5H`uJZ>@gZkP1?Fkuiz45qL@&iI` z+AsHdF8=cOLgz-J$H{#l42gm=fc? z@(S$>1L_f(GUH#ZpPC)gq<-1mv+L(&gj-u?-u3dDe7MYB zrLuZ5b{gJ30SM_PLBBw1S37439>;nunI8b2S5rB;lb?B|?oMC$t+l1%y6-~%NsAt9 zzE^TXjY5HzqJojqfz4K#lC8~diL5Ye4h8SdGZ{!YQENw~DdCXJM&07a>;h4<(d%K| z(E;dV)QSlf0nj?mc$nb|^tTX~5Q2lS=1KSiB|Q2kS_c)M$>JK>=4q-r{02k1U1*6C ze;n~YnP@j8-i$vl*PCop!kmc5 z~mJufoxn zZ(i0lLt?)yqBa4n0k|u30=~}jo_{<0z~bsNS7e`Jb{pC@-oJrQQYYVg@yw-5G8N6Y z#S#^U~a^fdaC=6UuN6 z^?57eXxas#Enq<$xqb9b`224NlsiO1X-hE4HY}Tmt`94puos5C#V~0ryqwj~04gWo z6S07<;{wZ*cj@~|+1IFNakFaFUpusH`4E8moLrTjm?a&bwZIdm?wfRY~6GyIg? 
zZBKa1%)~fL(z#Xe?sfQ({dzAzNrE%i0n?qK&`yWDuKE~zV8?<8nr3=X<^vP$}GAw^%0yyq({Q4Vht6ZK1ngkMUpVwp+4uK2ta|?Yfgp{TOBC zZV$Uqz?PRy%6|*MN;G(09`0ldmDZOIpH{z*k@-S&Wort=cO9CZj>NHy)^}UKH{OX4 zCN!?GmkZXK3cD)tMqL!G$ECNAR4!NKAW#S*^c%0KwG#*GKZOOQzsXY(`jFPuqX{m$ zjUT_sjJS~QLA~zFy%msWYmx_a?VqY=v%|g{t(2KMKF+KuLQA*wttj-`;qPw{3(@;T zHh&eN-o>8RkF#|(!CyEWKtRAu;b%!b%Ql^(7UJliF&qA03-4;9W`J6WcoyDE8eq=t zZMXI;q8rPGl>Ky(7PS(L&lTdb*7+ha}*8raW>kb+MxFNKNkdFgdqsES+6G zjJ95669FUy75&|)^M-KTjqKD$`{4i@$w1#jGl-|y zW%tKJ=u+D6>#6FWDO(>}$1bV*(a%gG_J#bIaSAl@UvFk+GP56XRo^bJ!gB|X_jH>1 zd2dNC^~$b3ozdnt`qACpJ=P_n9lAHp)w(F`YSsN7KhC9g?pQHsp2%#>38RIt-2FRB z_a7$ptre31ClCM+uN{c%h8G>cU1lz9`T-ES#YU9NBxw~I)YQ1?=Flw1Br#|YQ)JTbSm75>3`-(I^ z4OlBIB*e{(PSS=mOv5bE{0Ka_s{Z??FJxionjDy_t{;Em$ev^m-Bs016*~T@XoBmtj%Eam7h=7xA29a7hBOq{C(3l-5Ukk-yK6hZz+j9Y*!=b!77|? z(&dr0^J6D92>_p<^wxRQeiHU8a7Fcd&||+t-sPk$Aamb^q_(v4zYEuGZJb3`m77nd z>G1tDW>%}0fo^+$vE>%qHpZjQcm~y1AXzs(9wvLXxVw6H0`d6WZ=ecrdZs-r6 zB71D-?5TT)s5?dssP(|H*bq5odY!^Xn;$d3XG(gaVe=wX9m~| z3$fJNmndyVl|tBw0y2xL@l8aP1oX;Hhf996Enm4^SV*LhSlR6+xK}h%;uevbW@fa@ z?*dz+3D1)T78dIy9uven(d#IlomHeMX~-yfKl0aew=okJErZm;8#xJkKySrA<>$W1}|7)wSCIiB8t5{kboV|o<@t$_f^ z04Zv>tV4J<=gn_Ie(5H}{JLWkMc2|T{tflIA!${=dHIQTB#w1p<9&;Hevn`Jk=#nV zzhlN58(_s-ezHn>P2^E0-;`;ef9N=yAXp_sVR%@W$U8ks3f~)iKss7CAA9sQAi;8T zk@;GbEmg$^Rjpv{L79Z~cOHwwBfF360(3s%CqASLC4%;z*GK02g=T8 zXKt7Q3TTJvJZ>@&(*Lo`sIk3}in9B=;_%BFR@@~fVUxYOxn@~a6B2wd`BvWbn+pH4 z(a!Ej(tRWV3n-cgS`$|FGw-b=@PH&eX)MwH$Eah3U+o$ruk9E?%|0|xl}t6sOF9G zJ(_$MJm9GhYEjR~6mrD8Cka1Qrf45LMXH^n@t5*|qb<>7Nh!jVs+3*&LE{S{^+pTr z_w29I3_4KD>by>g`mBV%EqxUARpoeb=qbYS3(^LxjV{1%%y<-?{7Ooc5oO}Xy z@Jx+swvIn&wB}e@f~^M%?vk5hZL@q+zad3YA2+>D17@TSlDuF&-v7UII;D={ zl;m!eq;gNnbxWN%B~~f-EQFYA$lSI{C6_D-xoxL}%4Ot!U9QV@GR%FpVHnx4ncdFs z)A#p>R2~oZ@!99~e!X7Lo5~pa@wu1HS7^u1pPYe9gPRZSn-g$BV-+0eey9{ebCT?3skB9fJXOcF))L*8Uj`e*ltBZ9Sdg^O zyS!E)=&U5sPef!pS|EGdUH7D#SU{xrNVZxS)9O~YUIuz^qO-Y)(NFTkaTaEdE`ipQ zu@hdK(JB!@f5E@U?%|fCE6WG#2;Y_a0CZbA`{<)pV2i;a)9zGV%h0*M?n>jgHd5}M 
z^8=Cx8Ht1bqyI@{1;)gf+dw1AUpdbNPKx>~{r`QFdl5XbYpSwmTkdmk6!)S%qy;pB z*%LaV_Lh({*b6!1jEXoC4Wt=tufMU=Iy?9DIvX^-4Z>Y#6LzuI-GjADW{nQFqz4U7 zS~$iqDUKyB+naH#*XGNh+qMrVj)C$*DYvaSU7-8=#kRoNVW!Ci%d+1z|M|EU&&*S6 zZ4RS+?ur!^ue_tVLw|uQV#G3~IT-0W^>jYAW5>58cGfRr@Zmz$Y9q?sUjS0i zpRdu-+#NpYzZ_VEu7iE!5S6=6{=gibBfi8RZu_VO$X<`e?J#C?E7 zF1h7z->FvPE%T5w-Wkv)o+@xv zd^g+`Blifur081bOF=T$gqNyf<7Hg+2mCxShsa;a`-i8pyqC+@g8X>Fm%fULO3&hZ z8Z|#LR1^7m(#95{!>#Bc@S&wv%~Xo>7|rdSS@o$$sXxyU7UWvL0xTl?b}uz~rViLm zS5FpLK|e@wNolwR&;INpLhUAIsWR&2aznapJJ&o7obt3o5dkMpPc8#o3tAZ`t}bYB z%y9+;5GJVN__>ItT)Wz&L6bT+HxG$#yB;}l)g^p)rEfnBg%N^<@&#NjFOd#lqnh7< z;toDtNdn9GDPa<4JdY}vT>shS_e`(OC9u>#*WXEJ3K?ME*<8#x-OVI$Q|Q`ehCMU)j4- zB8ZnCpT4}NvZcoBazQi@6bN|mFXaAoyPUeQTZ|Y;i8paSd#B-;BM-E77^y{E0LT*G zOyVr((?UUgB$g^5r}gjN|DzU!G3f?`e0gK9=$=-P84z(wwM4bhcrHE^Sxi4iD? z_1L?N7l~iSB0^zq?|(U?-oUToIe*4UAHnMAY2t}Q{?8yQ%N~_2P&P0`-+5f1Xu(rS zOKsT^UGbhAJh4vN3Ey5L7u#hN?e`m-IxH;K?R8hDcI(e)~a|w(e8jB#JHbs--IC`+mM)Tw=mr;DE>k59|$$q?@ z=5L=ym9?pqL|p%U_h|33J3Px?z#87SC2YhmHG|pWQ17DMrqGQD{5*-Cl3{x6JiCFB1h} zRZ^wNot1wN5Oy66>yA=bCKVSVKwS_uL>6_$66fOFRkozQEuWTo9`7UmJM77n3Q!%% zCwnVR#!jZf;urWcKH}0!*#)OuS5f{|LEeC7;2lb1odaK(tJURe+~^$i8@09JWwMhK zEmZ2y%CnAe>B>U@<76@T%D@TSBGE6?8}`;W$W+ZjI_|9GEz~aNJoZTJBXfDG%@mI- zYK)z7`fZ52(_->RDt53kR*zlWE;dOzRow)bQaJ{lta`W2_#>TSPRE#f6~jrN2!Lr50e_09`T!YqahzJDhcccbe8mIj?ut!gYWc5cY(*WU^`k7|HY{ zeQn+yP?V{B7#H8UnC&C?sTM1^UML3-EN_KhVf%@W(e_wp(l%61!jegnDFYjMTKun$ zty;32m@=gwS6yZRQWF?8HwSs91bKlEkOC4)TV*PeI@&3UWnOH<^4#C5tFWCe7scJ9 zgI3nu*_t{50&pX|7f!=tOI3DDuB%yxgb{wp?AhPM1NgbGAjy-wVu3bRX}PxB)|Yzn zW9)TF2qdA#;{08Rld-~_9BH2lU1fig*wE)$+nIEX^6RiEb5YpOC`KV_sCcO46-XYj zt6f3c>NL&J{Xh}#f(ly~sM&AN==ZMJ_TZDxAhga+2TDbJ6<=WI#?#wXu0D4PE_3V) zZE%+>wbN>q7iGzcN8z3UoaNR==lcKWfPMUcFF}9Tvd-3HE^O<6DA=^X10ii!E%WXd zR;mE4y$lof2-SAM@Rux)r3NfrbeesOHI3KbXi*`06yVupl4npX+be`Myuh^MSBB@o zttnW)?A96+T^Kb%5hhka1iJt}`ieO^@)=~l=(|Uq@Qgdc&Jy_ivk=2SPPYkinD-I8HgQyIC13T8ce>kP0-jp@`N!`v$3Om$@fOy2 zfQA2>rm(XA&6ai~ri$$5r29@E!90ce9*( 
zMgXt3GKF8Bef7jc&E*shL;QD!XUS+1Lm%5IhNYbehrpK3%%hlD%Od}rU;og$rdF_m z$_T^61wTjKz4-|EMI~@b(jEy>W%mCI+r2ar6pOK2|FsG%U5Pcv4%cDAnfbLZk3=TqT~z_Y7} zNUB~UCrprwlU=dCQWIo2#AT*l6`DGRv3gfrZB0+f2rC(>3Fu%C0yU?>qad~O&i|O6 zm~zTI^0#iq+hP6khiPXZ)8u|uw!89xx3Lx5fn1~~kI%wj2jzkBPawgAQ>_!5jRX&b z+Qav2hQvCECh>T$p?oCubGfkWmRG@w`-72Y9h!H% zHQC%a0>-m8zgxT{K5BZ6`55umPt($*PYEzoMo-#vBYDpSTEakHHcN^Y_r98)&;dQ{ zucp?5uc;YXJGcIq)9;WsBpT1PZgD1wHh$zes4{u*YpL%(hH)VbLIs4wdSj^BP-hu|uDn!Y>vp?m z*>k|c%lBDei}Pk{LE;T`2LJ_@p!;$*qd8JPp2~a@|LvH+qq!#QE5vp{V2m;R)!v0@ zBUpjBmA+Me60LL15`^mDk0t%@9dsheFNVt`3DE>jiQJFVSC;X`P^{2|)@@jbt{F%B0>F*9^UVOrd3NQ0CxRMp;mu)?~UR< z`QU(Ico`12AadfIjYh2<1~{!?$Rxf`tbq9kzfg?_p6?MT9_q5Rx)xhve080Si31)B zi@1_K#?4bN?}0WsJpkUoV1Da*1pbFB96%_nr0N$+O-0rPmTlK9#W@_w^L|-k@;VCj z6By#!(OMHIJ#mfg!haHfO{z6j#twxQP!VOdzuX z7!%8=d6*?RNQaYj`TIwyaM5Jd)>dRSTm?BVk>BjOG_jDkrKMSFKv-t1VsT$Gpr?H#Bs|6AmuoTGJiw6i9R(u7yc*l5jajC z01wQ+jWsHMA|v!+KZ+YjrremuEH+^^-(<^bv+5ojqhD2-HWRhlUA5YQE}}g%CNW6S z{a|1OC$rKRGk#rH0|${!s1K?;2oP7bKl~>F=t?I;ol1bWC{-VShff}uTB*N_pR9z| zkk=en0ZQWKc1jf)7;|^Wo@VtM#3ghn!6tMIy@4`a+iWQ&kN)%1)rHuX;+2A_*~G6| z3S@XN+G?b`882o}_S|~XdCKxZ%G6}84fLG6*pfzG+rirCd^VDjxU_RrrjUvS*wO(G01$^r#v;g=#7VG&xQg-h_;u!hAtBsxkco0 z=t~1*ZHj)kXP7zppt~)#3RVgvnCDf=J!?!g+?Pz2QPHv+k1;oD#VMoQ2`25V2$l2q zzECTS-97{RzdXm$;1_mIZoo5lvMlLQt@2+voo&p7&Jq(40h(eS$-Y3XOYs z7Wfn1m|rOUEA;H^5MzZChc0SD2*TGwTb>4eYD%%YU=Egq*)`JI7m*A5>h9t%Y0(G} z-_+hgu0Mne9G_Leq{Lb7N}dnq^v97(^jQ@L%mJEf7mI5s8^I~n5m#xG^Q2?;MH?@SYa%MG z4xWSnGq>-^l^S#>Z2#1_E27WJSXo0;d?`B8{6R`|OnT3E%~h27e-hzd6+OmWE{O3N zH`jW4PDqKbPdN7-k+65k2eGgD{K>Opx)*|hm^qs@k4h)FXGr-N=E&8O@a-@6Mx#^h zYrI{~HjZf!!P4Hv{@-ta|;gnYM@%kO%Sy+bHG-2pHieVHx)K zq_uIh)cyF*TXRE&(QR3RtO zsN+IJL9^=58b}5LFVzv+UjjYhH^d**@T@@O>8k$;gfd%@_$4t^)%;AXE$qFmnZk<%NyqFI-;cgIHg-k6`Zcqd^36F_k>o3f{Wkpq9k< z6S?g?5bSf=cOna_n;3qfjC1hF^?Xs|799;pKiykWv9z;Z5?+eTC22z<8S@U{Qv@Hb z--nU@$9yxjbz(3Y)JbBDY^~|kxx2NOX@lTT57~~fOfi(Ij{e!0AKq24`=mOS9JDF0_+Vy%*%Y@%alO@d2RU$=LC?CqAM!nGC36*=Uj 
zvz|qa7S|h;imxJm!%q5kP*~_#PhJG|KMAGDz~HX~pOz<`$HJ?|mrWBDAM*p%)kgrB z^1_-2K#f|r%5f;5!6s0&>5#(DjrD-{*)D?CIsAj8!1-{IeW%{I?fW0R`*Yzz86r1? z{Ee{_qzIdF`ioOXhx8y-)V$3|WB?y~&vc9EKFX$~AkBEFZ!XNbPN^mfb?a2ZcBVdT zdI@ z{;66Zj-}qF*-rTgH~7rk(9;&7`--rvnxfd#zx5fDhrv! z@W)o?Qs(DX*n_K&t$z~Mw93Q-pjz=)U}6v|$AtbXycJ{b0qnS3kU6^LTLXWicXpr0 z7~~tV_L)645q!IcUL??qSK(~p+|S~2P?Af4yBa&c`?DyQnhSV-S_wdj+Saa=Ro26+ z3O#23%*3ytYK>(&brsnZ^#Me1S2OwmFyCN2GPUH|8o0-(1}LX4je*?z#xF2ha+z^K zJ6OTFOuaLmB{U$MrjKdOH-aid&r(yBziWl0#-xR6_1Y78hKVz5<5HYdx$L>#Tt04W z6kt5MxohnceSVI*(@x8Ea#POYD$c@_ZhF1CYyOJkz(jP8#-oStkyzKCFUF^^-5eLxwamsDu zD-mBj3}6%W6d+*a6|JJaY2j8QNe)<-0g++4CwXozE`&p6qp*&GDdW~?Y8bgbf9kO>3RF$IV_Ch_NsU!@+; zDX}VCzIxY{7>u<_RWeBlz5RKt;MP`&1}sa&&m9(P*&}+~=xTPDCB2Sg_7oK|md2jZIjoSwFxmb-U>9fq)b>TKk0Q$wgQb!?JZ^P) zHUw0rbzSWxX2M4DWINv?ru3pxeK~n!`2m_-nB2Xe`K@Ko`Cou|iP8&W6%Atf!)v%v zcR$f*fD5a&(un$3bF~b|i|hjb!6Ltkh0IScgHMsFMOMmU!^m$(=QpBL(3ke^X#`D5 zelviBK@B6}X|+ECL|>%)o}{hBNg~Aspf?cK6~TIvX>psPxtwkRHyQOp?I>Vi@;(GS zqfu1qc?AriKNxd?KoP6-zLgEAx z$=G73t{6TzIcQ8!qzlEMd1N9lfg3|IDmm?Q)uU=u=6t~_-fl0~!Yr~YHk5|`eMKzW zoI|`(o2j@*`lx88^KM72Lj?R3LfUA^;jz>wp@ul0KlRPA3w>zOXp zke+A&R2hO#+>wzRDcis2w08H?R?RfSo zI2!5`%G#~5+^{3|!$U`e&G?+dY`tsb!ht}+60v7U&8A~w1GaRL_eis) zhmX#O4Ff;8kA%sfv1IWl+=xiVY3TTm(!V1grR|-QxS*h^V4`U9-8|JIZm&>_cSmry zuqdm52bd8>w9ICE2Xhe-u2J~mwKe@cmh{&WJ=~x0H4DkE#yu^?)}?@J;)U8_)Zsru zw+5vx_FPnP+`N!t{#)k+Ce#Ml+=rgiuMUO$3S+Fl&%)kDyr}4zT3NeFt%b4z4R><8 z(Fcsl=LXMs88>)V*f0aV>q~VNJs;w}+@-P*-kc!Vmzoz>;~WpAyRMW|mQ)!ps}-vS zEhZntezNf)}yK<%~Z{_ZC-weEt)gBma~PkeL?rX+W%nw;3r(Zn+#YqCH&Imyy|#j&5W z&~+FVE4o>tis8|t<9pU`-;*&7$|notEo4wZz_Nj2b^W}o)N53U@$MYQwq^(Ah@{%f z*)*8I-4C!Hp5S&-9miU$o=+V}jHW(h$>^~40HR`tPc!IQ7s_j_ik9N8fnV+MM%>|) z-dU~g65a7ce#umJ4eTWn@sjZ9bB_+~ojmRHvuQby5+nWq#_gMFm*nMdvnUV|LJrf!I+tf zsow=S5y1QJ@toObeoUXtHM#DUA%R6U3pRJij^1_MrQ23WVOLsWyBUqb8` zT}7nY9@zR$)kwANkzq663%7P*`-%bS(lT#kEHg9gTt&q1C@M&dL&g!y5YlDzqU!p| zpxA}TNI7KysGk|lse+a@!D4iX zdFb7Y@d?wRFQVGx#6QbE#Q4<_9VwTYX8FH&=EfS&KebjhF`<*{qM2U9TQ65VK#jAO 
zir@re?lO6ql*HRZ=O!kvsa>r4XYK)v?Q#P5) ze>b0_P1~CK!w1M=P3IDcf9hQTmBa=&TVm)S1bu6m2jr} zC=LH|()z`@|0K3fA9D{SL;7&kyoZLcti|jgzxe)RnI5M-;a;xfgK&*<{#sC?*0|8s zq&KV-ukQ^0GxJRAeTrZ-_wM#EB@MWxf5plaBfOlIUZsiVRzk@`zgP~N=>s9N`s}wl z#h2}AKQ-L^kU_9L+4H3 z5EHcqCPk6>i8+S<7h(OF7jM|%4Vx^HyL%(@VO7=I!bWZ8EP*5W2hXE3yBnlYhPl-_ zhCmc@a3yz3wp!QRXUp6NFcez8J8dgUDZDi#3q8%%1Sb@q2t}h%hNQLU3-hqahf(UU z&ziEy98nZ5Uo0o|Pj_UO&MMYy2687LoO-d8pmABi8RosZvu^{qa)p znI2EKP}d8}O^+fUk>;ZrFJkqmt~>UtStz(71K5c=x^p;sPc*&`2c=tAB`+>g6fF3= z&_mK^<76Nw5Hi%CO@8nLYlV~Ta*qilAz(LL6SrN=i@C%Sr&C|qN^C~thFIB!K9W=f z=R@s)Ls1;Z-UUi1yyM~Sn>66{5~C^e;wogj^elgXttC9&6cvOrKon$qgmA#`_mgEY zaUMX0ziz~FDm~|}(G2yeU=n@9ro-`TI!eTW%EpAW)O^%IY;QB!J;7;2uRQYaAbics zS}fW!}@gM(At#~K&j)YIn*k5DbRmJ2--W5X-DcMr`N8AN``@b4WDHrJty;>JY6 ztgvmR*b7NCX@w5o@%}wWQoRB0{#Vkh!w<8<634g;BLc`(UN-Gzd71h9jpQeiu!YwW z%yq&iTwSvU{X5;p$w(r0f`30!e3#v(vI)x>n=G5oGW97#n7@@}|J4nC$6T?#uC#qu zlJLk~edOq{dg$SVWHjzQkY7*2jPBS)yd{70WI1O-FR$FG&^iPROd9J_X5gS#M!cq3 z#+>`H5H*?e?ZY9xtm;!k9iC#qVqPa?jO4zCG}8h-|9aO3uL=zYxH#msTUS8u%qJB1 zc>+}Wc+8*q<}V4kx@W=MPzvznQj0HEBOY%KrESv{wuzePji`CY?EoB$^pR{OiY0Qa z!qOcIY}|3d%q_)GZ4)7ixUpH zgL-7BM>9hrYvjCopc(M88W?ze(yN1jSLI<}A?kz=JvZyBx?f+fcg1{}| zee!Mw!ZQ5+^>??O2R2iF7I>&&PiSiPA8E_+dAnfy5%%rk6Kqh@8j|8SkNTJ#Xa+r+ z+_sk55`xkWOA_a?;vch|!O}P-UmNMl3>HrISv&2Pt%r*Dmz0{|Bg0Lv$_~$v{P09$ zwBoxsxYXN_ZNXxIxFo5|1HUMzx1Fb(8~mK#yZphTW6rvbco>n;`Fi<$omYP=uusjJ z=tlP33+=wRBH$W-@Z~ox-Jehy0hpN9*Hv+8rmiFlCJZz9O@EXC-2EPKFIIfK7&SD8H0{bd3yv#$eTis{(>#1_v7vYsP4X%3Cl zkV$Gq{yDs-?ImU}dfOfUB2I)LKnlIeeu6jX;$0YULee^&)fRI#s3`MPB7r5HFfGy# z<2fGCcyZ#53UWSFY^*9-e4Vc(%t4QMU*0ydsBzH~75^fNDaV#aIX}|db<5}hi9Buw zVK$1h(UCFQeT~L(A`vteL}ITuyl+`gOVcA(|0DvqHJLzt9sOr#QaksE*^MZ-Ai(7j zI8mUiSbpxMgnLc^kdqe0TTn8m!4Pjm_(Tc z7C>PQhE^A{1eXVLf~0lq0yh8pT7oJaLxOvyi3yddT(LDN6M{$6|2v$?K9wZhd`ty=~J`K zD4S7t+bkcu8PkYub(N4%O#M*~?(&fE#&Hq(XEb0yjSB!lgMe(-upEV+A?{zr5m%hc zqE|8Gn#{)H2~d{r`1NoW-r@O)(F5lk06P>8W!<9ioC5B$jlo88Xgi(FC7K8?E))M1 z>at$FL8SDe7T)ebLM(YAV|r@FrV#trl@H+P^rh%f3NW8!hOPlq_wUX3CW|V+^y#8T 
z-o5q0h{E1jLo&e`J4I2C2S4A>E`-ZnTZ6&nBS#tkt}~}L6~q~goYkyHP_giGC{5J6iIWRiW;cm-I?!Q>N^sO z7W&2(7LDk_7$iY41@XY+vq0&vN&ak3-^$E|Wt8IvILSERk=e@Ywja0k!_|(>l+CKI zjO~;-KXQ4~qhH&+(G4jMDlXnx&si>8r@#JNX8}Y5dcgR2Uf77V!d0D>CVT0O_Cn$pHA5& zy!_<5I?jUNCQ75(gw^#ww>lpZhIUhnlTe$9c@1C09&r{6%wFCz^?FaQ6pBo?x=@xO zsc>R~)3p)0dzO|1z*(rCvi_Lvy0bpEj3rGU%(anyR{XfL^!!Qm=%kIjT++hu^elHd zIF9%Qd|3Pqtj5bIbU_q%gASQnMuf(dyC7csOw$fn!Ur>VW0W*Ps^|j{BE#06EC?=ZEbGMVO*4y z9j{y_jbJ{`KQ$&U@Z+iYsmoN&m*;HatnJ2v_k8|KBziX19G?okxn#hcrr|?bf|naf zP}O+0TCLu}g!f5M$s7-NO{olR^)3%>pg)wWR7i1`yJDhkr~cPZ+zZD@Ad=$3eRm6w z&FJC2O*er#wU?L0xo&6q9DuV9Zne`gU4Yq>UsjsiS+nho&=VfUaic`3z@Gd*?#SKP zNLIMP67pb0(`c;5d35I4H#MH8?|{2aL~Mym%TUr4iI|*N`fj z=NCWTQg$71bg!#@j3S5+qx=~Ey1nE{lGXoUtU!4e2y;H!RMZ5a6Qzs`%!s|hR_@|2 zW|Ha`DOXsX1HpmSdH3!apNIewq~$~mYWc}v?Lqkio-Cr~AloSxr`_%mQ1R|MP71J- zcZja211u8da-BEhoQQ_1(ET*c%>fQe3|rMb14O&_i)#|2 z+AcG0K=+>XioogVYNu%$TEeg>tO?u@*D3;Y}Z5q*j;J-Vg1U|y%0DSsfIUncWO zj4`n#t$pcKyDIDB@@&~!sm>HQ*Kv^>ue=H#^5Hv|FfOOYC zavo)HZ!ZJrm@f19cb0+Xrb$=@i_4_4c3#*!g8p=GuM^zt0zVj8pajF3HMZW7swnT?v z7V(4Fzv?9u&&CQskWZc^KXAaIk6+Rfe`u}wPh!_+4&wS9rRvXy7V{ zRARjvh3XnA@hE;>1Xp;Hx4pcB-P{@dRQ{Mcg^m>irv;Hx3)YL@OaVh(%9RxbK)TLK zgyV;Wb;4aE8@{+!uTKa|*!qP1#7EiRN%^L75ZK9azL;o$vS=n^R05AY;$;ZjFe48j zdJmXYPuR7*Ap5nQy<*^Z0bKHZc5f5XMlFZNJ^I7yK;!rI~z@m z2?WjK#|Waw0=t_|KfFG%^#>@Fk}Bf0sf-14v6IljIP#Jt2SBXE$LPd^1}ctEF|Ibu z(6-^u&)Vm;YSor4EYz~D3$9DsyQH*KFYD-`Hk2c_k@aGqign#*H<58MDD38XLBvrA zgHjHG5=O|!N^KXc2|%bbdN~ze1mD~*C*yannY@VOVV$$6qK}_7MFC+-tuFG3Wd2Hhic>X-g&4;7y=$FWaxYti&&{I;Jpq^y_N5 z(*ut$KSW*rdNPT4pBj=CF1#$F(RBqG0T+XsSgj489&FnzxH%0f+Q|e+NN#d0OcpDG zBj6=Fvs^CpJVW_!hN5caH;a&vec#mLaib8B`FSJT!-%5xb4sa65_l8Z;t-q2+#;04 zLb=m>Ox&cQ4Pb0m1&*nvK+yhA;!Fp99y?vPxt8*~mQXC#kF_8pj|l%}M?P-0 z-ap1CK8qN*d}X9;aV0AnelWg=fmhYH7(l&$xZi6)Q!_oaUTBsVb1#Ms>xBxB;O7p{ z?&ED1#ln7+9syvDJ3q8>y1sSsM-pjka(202_HRhl)DIgWqw~EK=Y2){MOd(Ub8c#V zLPv98%)NTNu6V4sK2Gm>*Y9kI+B`ZBz;L{uI(f=eWiyQ$Ok0zkhtzV?83kr4|v1B-HdtV5k*8oyw8E$t_nxkS1 
zXqq_f{>_EHrJceZq{shW%G)Rm1?a+E5`S@+tFl_;rOE;HVAa)?6CKyBy}{0-2Nz#w zFgbuhuft}-C*i9H)9o^nLOx0te@W~2xqJ6e*e17)EWKRcl7v^LKk9@Ak!xT;e*#>_ z4N>uSJ}|8UNoi{i7M*wo@SmldTMSG_=Jr|n?=V=v!x(NksMMx^>(gA^N4qxN=5d~S zF@pdl^1TZ6Fb#tX4CZ7*pGkB|T$r$G$4i(r4zLtmxca=Lh*R%I+65*LBZa8r3*K;$ zMlQY9dK2(Ft?};?LeQKqF8^MJYr{7BBsaF`By&+T+i6U!`XE|-YVcXv%Ys7Dym&Db zBoiT1Eh^VW<2mzyG$&5Ry=DcpK@##E#vepaez7Bh%sJoyvuU#hvb|IIE>II90qGAWojB5Zoey481f z6?Z0w%K@?yx+a2?2Vi6DU?@MiOD7iH1U}458o%uS$G7*tOUIeULDYZ3S#YtmzUXl- z9nuDftSfn_0%8}{j94}YcGBjAiibcU!_^s!LButB6)RZ~Fm%o`A1^=~+$W=ua z3n6SxF0#%qSi4NcG0SRl1%Ou8A^%m&{R=T3o%)_5|#ZzMzOj9mgtxTRP#8 z8-&vKje4Qgk!sO{!0xF8_f`eVJ_jsS{M&j8lL6?Yy_n-eY9d?T&#mKe-GP{+V1PyJ zUB2fZ{P0P|T@dsNsvYkO0Js(SibrLFgUFHDGo*7^>Dh6i67N;yCtfbqswyH}Pe)h# z&(Cb++fbs=K4-B)|06?u@CA#lxF5cuWnDum96Om{PaNN2U@#Q%du%au;%uIeTCF7W zoqy|W^3`%cN!~OXVK)P^7wEQn#nV)ZwFbUUwo)Wh5djYJK!NdabeX_&OO{5r{K`gC zgY>8XJJzWrI>jMvrcX72pPuJ?f;}EKXt*W(BD3QwMzy$SLC!9cU7amPxng|(riR_k zvQ=MEnD;8fF|UF2*MZV^92qv31yIBbNU$NeRm9lEJVg1 zZ>$5vxez9Rw0j%Ty-GGc56g}vkw;rvnV_rbKe8%IAi&v@B!4Z*ul(dY;P)qkTL9SB zbWS0o|MeF@|e(o~et0MFyu4_`qjf+iCVj`qzN&XXZqU z*06W7s9s7Z9{P;U`W5n#UKqS6%w3Oe>nO6#ySzOC@+U%YwQ(&7qh)uEk%7`%^mjCR zUBw`Da~mgBzghk<$Im)n90$qTkXsL#mAi&=E34uE`awGGElnV^E(M&g^$H?*2PN`M zQT$bGKmV^+E7N+3kHX?0L{*4O@j}c6;0nfYjm4+xH-+e4xYe!CJ8J1HYpQMwdx}Ph zZ>sI$+3KN!!zDu`>}U6*DL z4y~Qj(n5T_Xf6RgDoA-`F$mxA67TgEDBN{-VS=hr$>GcYf=|W_$h2+Vol4{0l~92z zVSrHVe%oBnXwd4@Xj#}ucN#TL{z`H$DSZ5|)=T|2!U;Dn@@2PvY6($%RNS!;1jgta z!Xtew6QVOGH0@-(3plAkWxZ=X+U3HO)j;}rBV0YirRj)Ro6d2ORzlTv{F9B@SUWkd zs@2iB;6n{>_~o(zYVvZ4(IwQf#7^oPUe1Q~pqez#$G1xDG&@r#T+5RCq&T%sC46L$ zr2TlFAPwgDAjkXJqnkxom79Kn!Gb4Ks%OvscFQ2o3$ssd5hDMeMD1~wRUuc(si;l@ zpn1xepQ^3qY`5|0*ca=(!xI5n(sguS>r0-?~H#ye?YYR7&Z;KL;&(7pwXQBOsVY7 zW_tpz7`%+RkT5)|Z8cvQx_&pU)PDJwUc`gOBg6U(WNAv`B@>xNw_Q=&PhQu~~8F z_;sZEfmdx|)XJNVogS6O@HX1l{@Y&S&F5W3EZ)l|;1$a{y|ag-pBnCmNcq0QttFwy z0mwNd8>2*VfzEb#2E1_`?wfUS8JxnlL)y^Ev+6u$w)!QY8a*+aSt4H|yZ(UM!hlM) zDL+hHUZ>QcoKOeMqo~9r_q{9%T6j9*z|Ulw>Uq+Qs=WR8Hs2t2@j@)ucO2<5!ovmJ 
z17qMngiaN_Y^w3`CSE8T0t2?&uWRF1Q4>gAlcOvH^dYbDBl{)jSS!&pvCic=JrI)n1KL=)R5c|QBT40;vtxfn zl!KmPMA5MYC&yUw(f(l7ZadeDa9NJ=upOG~Ki1UY zk>g&6r=b^!f8ExfcFa2i=WTR&-fcAT&@ZcFMnnR>I_sf(B-P##M>c_>?9=RnQrc$G zH%5an#1IW2Saot|)gF3vud11)ew-7<$l&DW)pXd3txe*AnzvE*t#s+xqV##=B1)|L7I=SV7ip~w)m1w3`+#KTzK4**2#P~>MLqV)TbJJ6 zK;e(niKTtJ$oL{tasp4i8)?;`0Tko%=7nUD<^@UB?`rW+hUR?PUA~CAF4@WE#)s6< zHcfED_6dIIcU7(4l&62!3y|hGdlDa603;|DA!H9~rI zwPtf@*nu?b6z9!JvDUsbA==&1tQh~oHmLwWgFaOLuwgpgZ*AcCiq@t!mN<5duR<@; zS0iTJ$-9yrM?yM$xv(nH7iDz0@e-s%70#-oms0*}4nswnJDR>C4I+OmC??8-(4aIr zl$dx=G%PmOKCw(jpV z;(jsuZ2QQ!`#X2s=t`KASEuBXGqd<(XJBi#&NDNQ9!)Y9HHEUi)Y(dEaH5(9oA^oy zDSe>i`+QaBK+On0sA%8(YYFv=g<3SBB|1SUCpBl?fuDqTsIhW8?Ki63Gl_eVgvGDsoJ^Ew-qh=+HgqeOE zmdELe+c$?A_aV}PpZ+W$2M1{zH6^~NMlI~s*mse^T)$5N?Dw(IY^b4Mho7J zsH4j438k0432E61%B;8*)0qXvL)9Z2p)m$_MoIqdu~>PONfB^`mK zZ}0YT*U&q{D+agE1#-+2z1<*5;*k!IuecJ8q;~po)_C5vR4v(9I}Jgt-^p|UNNsxy zKE*2T5b2sUU;cL%VbiUomt+7Brp_qwEdY6~HHdv`1duXe(F#T)74dfv>jp6aCfh7S zl|mhMpW@kfKWYpim3z6SMfirxqw7MDK@SnjZ_FI$t=F0a=VLGO0@?U(lytKrO%N=( zggHpQQZt)f=9~`8o{r=@O&*B zSsj)%8R;@&Vt~X#TJ_th!p{;C-<=MDYeW!&_~1O)B!sJcdhj7evx98~Vn&sS`+7I0 zx6e{nuLG;Cq&@>@@9VWjtk;=m`s819+8k!pid_vks6$oj`#-T3T~q1WUDv9j(4_Hx98@#D3JEB`F5y6zpmeNXFDzCyXF&L>j5iy)S_Zh);b1Fm)XZ-Sc4|` z<@sS}+MaKeOAw0PC5t~ZYV3L5bA@bT*8%x9m*?$Mlm1x~7`mj`;07k(P*6m0gsqmnn7p@(BMX2x4f7q6g$;s}zOs=3aL~92fk9_Rr@jBGU z6;1OO6P`ZssSj}Qi>C#k6TkM`d1q29Q7(YxaeZmAE+G!e1f<+L<_;&F9AA(efj%K* z9Ji>leS)fOC27znJjiy8H@6<#3^=KNAj2DR{XdE0Z9Y*!zR6IjXMDf{^&LSfxhoES zI8XgWO2~L-K3hvh#Y+8v2*iVaLmpw>O(Y483Ob59o9%|q+Y%MpPB4ml0qb}t42FLD29C!sz+Ss(3Xiy2z8RESSBO2tPDlt(6dD#Ivdd32 z$-II%w(Xiy*f05qgz>t2(;SSa?YL@B2dPa6wMNWi;^x-XR-}sKa&5=9(nUE&s$Moq z-MK}m_8T%nb%txV-LRG29&4BXB!Vh#n@bJy%;c%vpxtd-2)%=uo*UDSyKeM_OawWL za<%OdrDlC<$_ElUT;-FU@Lb>jW=Uo4 zL1t!34jhRKDpS)8N^|6Z%F@h%=APuFGQ~v2+<*%a1rnC;CQ1Ew7L;hA!YFG38Yj1tk zC-uXj<>>0$4+h76#Z&1aj{xVhHWpqu{qLTGSM%UccOaQPG5dymF*1z$bqzq7&gIm1 zU`JQ!whzT#G)I5GqB!UvZLmh$sG}~UWG)!=3EG*{TnJg%~f|LB8#7Pn(^*C2=HaZQk`bWu4*u(59C*z?Q2n} 
z@^aSA`8@va>lGI~$XQkY2dAOT*(I^l2VuXIL-<4|)|$B84M^+;nRdf=b4Kms1L&31 zww>tqn~~UG2_#|EbFfyK>RY3>zKTT@T5K=2E9MVMFm?+VQ`lJj@&!?_SX8OA%R zBzF5~T(t>_H>w#vGt42#r*ujGmQE05#TUv^gHwAr&MA5CiB(mVl&XyRJz8Ds&3|tD zx~b9tnaM=y3@__`4$)W|X#(mr+~Epm-0$!x<_iAlmQsURT&Cz*!27sUT*VR^F~yXb z>$UK7BlbMJI8b!1xkfu5TU_hOmFprV@%LZlm-ecK0P#jW;B*)boL$pg*0Yos$+Rv= zt;KJd(~PudvK@`{`Xa}_zDw($0kM%IeWBR@0YCz~K}NlXhY_Ngfmg=9O{<1#d6GepGcObu_SB!T(X{UP!!_OLRfoJ~ z#Y7G;V3x8sh8_U=Vpv%8v4z9bH3&C@u7?HNu-t=NE5tW~5QZB>LC%t%$62(mC*lLp z;>~zy}LZ%hlFMIpsQ>D6EKo(xxxkZs6DhGSZi1O8mh{SgB98 z=u=0FeW2m=9^y}9Zh4#%R@S8S(s|GzHDjQ_(hv zm#JHL8muG8m!VxB9cA>$taX-pRQfL{fFBdxAHKx^Ar|#(1V=?CK7Vmh?~M=F;HAFm z1(oVaG3c&lhXpl~yJR;9&4(nF-5vz#!Bq-AG_+^`>Fp&2Y3BOqWNVGYIT*HFsFbs!_@7bZAGwj z`uZM*_WJ$1a3C>osv)GDUY^)NJJm;X84lt(@$%>{q7V3C>kuBSQ|B3azxdq2_s6)K zm{p!jGo^;*)I_a?E{jY{UxZJ;_~&ri+77GTk~fk!kN_`bTKzJ42ClPrvRe;Bksfol zc8xD}s;L|#Taa&Dyu{2rbeK-mMEe0llYO~>)WKi)o_%qk99>hpGJSu946N~>NKI!V zyXupG;wX6e;Tzq;PuiZ`$v1CPoPZ4UJ+-_qtU8Wb> zeb*N6o0S#rfInJ3lkS&VgDT~@JONtMs_dAC9;3qi)r8-Y-6~76Yu}Q15rmZ@;uyKQ6adPUw|j zE63#t!y!{60nE=aMk)A-Yn;v}oI|OuKXy2Ov{J28E2X7xl}D~u`xir=))qI3VEhjw zTqf8$JOfWYjO_<(1BKpoZO^WG-3a3F7V_iK;DtIk<}v!r#ZZ(d2PqWA=*;C(U`<_^ zfBH}D(u#)%=6Kp{0r(<9b&-ZbE&tcO4c2CJ-bioggNTJE9mGgl8K=R+qPaBa7dyCF zCkQFaf8R2Q>hnXT|Ek+wITQbiuT@RD5-!o7Z1G|co=S}da#uUYg63-m+of23cH*%d8SXv z{^yK|$G9YS3I6Qb)n6nFa6*1QZzhtlCF(Mxxg>A3L36o#oVLSM;v8JE20DFBJ^wpM zbnW!XOYNlL#{eXcdC=I2=5c`rb9=Y97V>JB+L_Z8njP~k_TGXY-Y&c+KvALL|1Q<_ z_h*lUl2!cgpA2@ktM9#wHqy4trdz}8fV@mOhTvZ>b)E2#qz}z*er2veG{Zg3=8HeN zk9xV~mH4K5qGl2vks5lUwJIT2(z@WbT}XMl%0wu9O6zm($b5K6D5~FdbZ>aSts3N? 
zc-w_!@d2!K7n>H6@IYPfS)2Mr^(m1QvdrdUY!UsJj%r!?rb^Q}+56h1#+=oCTqPL^ zd|Lty`pI~?g#wVWB>}gA(7Rt3uMSXk{TkT<*v24-m4YlxinHloQ{s7m$(F>=(ocn{ zUvb8}3B9qlOXbb(D}Cc$NFf4KKqe7P;*`&EbVkfH@?oRv`w4vPNXR+YYa zy)Lp9diJP|$;956^%cY-sxX(6xl!k>{4?U5Qbv&P&bp+bM^bFR%@#>kxOe-5pS{SN zdO4PAvk#bGSzr61^D!>#)RY!G?IU!5?<|ecTa^}mnZV>&S}*O3OpU4o;)lV`!Gieu zaVmXz{WM+iuRc@H5zN8*y6)Sdqpr(YE=k$LoSY!#VV^7lygrywgMF%PRoC?0?(f`ySEr&8*IxvbZ8w+2u1m!fAlpnU%T2rMHbT#|Np-0l>rXiZhEZ zpLD8CCF(;EtvqN88Os>7p$tls1$h7&a{HCSnY7(dOn|QCZ^@gN1i@u zIdz$n^FUfRrK6ezT_P*-^C*)6wLdDBH{z9Os1sippIby)NlB^-%cm@|Q-o8%#lvU% zji?d6u)5_wXM|E%ZMy&?O`rW|Se;w=hoQGp^;*n63O)|~knO#%2>#P9=)iva*KcIb zt@eboFv(ybc2y0zU#<$J)U+zvzXh-r(M8>jBsCk87gjM68~BWuRnNUS#QgwZ>+$6b zVL6A{ZBFTocN-^rodGA;aqki%gEVhL%`N-^xVzmki8VDmg#NnxhMp@wP-lG{he%tf z(rOij>32t16EhYpa->8U>LR44Nwu*pt#IsNo2?8U0TfFt#>9tvx-M5Moy1^=D~AHS z$Mn5dTlqwLIQnJp>pQNOGre>^`P}=A zb(6-hb;w&LwpczOx}wf|Q8Y-8Zi);@?jOo(?}9ot}=f|fHLYpr=adstRQAMU~N;+n1-#aC;_U0Aq{b!EcKfy;@pGkGCpyHTu-@JnJM7j++8$AfrP8_5dKTI_tz1iTc;vEMz z2t#6b?oq9aIMt-nm+{N5PBnNM>1T%3CUv?QRKJD?9jMZ`uj38l^*}cFlWMR0r(XVy zblcmIRiSn}k^;Ytu&2ZQ;I3S!bPtX4pV(u7q3$$r2f>QnvyOp(s^O~4Rj1tC`n94& z#Xu>R(mTH8)HA&b4iuBF?Y+Lm6($$}6jpz{v6j=ioE3ig%fBzss`no-hyzhJ+*Gkc z(plpkM}RZ+VOeWRRZ=Rf$KlJ#jv{*l()D7HpYFfqZDk{R&f?+Ps2CU_EQ))vuymX1 zSk_Kyi%=K^*nVjMJvu&#F=g!Z+)$v47>Ioqf6@Mtv zyu@&}P;!a&1%f45$ViK?iQm_kyak`-rI=0d^tWUTY>+-KSZa|`Hmn5_S7kQpFCBd@@6e_gxWZ()xV?v)tgLHa zBTe8*)U^d1Z`pTEEex8@GjoCyr!JhNM*{5l3?oKBiveZO=u|`d(vu&jMZi1;- zO@Og*#p27NoWnu}E1wJV78_S3bjV(5Y02)(xb|?u0l92r!2sRTWKb*!{YAsNdr&)GQbFHQ`=KtLTieP<{0WxBdbfPIE|K0o3pAd;q-zNmUq;A71 zg?P8Mv9a-ptdZI>lqnsi#ZPDg_9yzVt#h7J5va6MZB`Q?V4PB>?g%G`HDr zb5h9~(vr6Zo(w0UAeoLCU8)aGdi&15)x4Y-(!YqezH#2B38 zC$c~4b<@#4;3MM1;cOnbg#O_3kovh|yLD|eP>`F|O$n&}DXh(0zSNTWaQM9V~M0IX=?zJ6O)BAlVI_%O>7 zm2en85Y7~Hr+GiL?Xa0jWn?EWu>#PJbY`{mG9P8P4EPi|e7J!j=nmV>1-1fMl|I)` z^{!Jsv^2vG3LbkX$X}}*W;?H0=Xsj7e*KEbfBACULcAR%Lbsbmcv*L#tEM~c?T;wb z6@(hsTEp+hcMnCHAI8T2=;Vk2j6}IvGWSpI;KCqHcG{jaY%k`u-lA0mnU%d9BDx}Q 
zC$+n~bb1A_Yusx7F=UIme0#V4Fjd)?6Ijn})C8gJAA%1loTd+!bPlp?91KQn5iJj=0mSYGbhp6#O=CvI6qWM!3h9ThTG)`dIUnrIrOrz^L0{>v@-Wy;tzb^IXb@ z{PSlz7$2#Cq5ur}fYkn5CKtO_s+^dmai8dZ z;^{~Lx9~jd>x^asPfH*Ulya+!ftMcw?F8d9(RQ4WL-A1K(j>gN0kr3$POcprU_N}> zyRlHh5=tOnm!hF;tZse{3~Ut|iYmkvl*WD4n6qHwJBv<3<;Keq!u-Du-_at0B6&7e zqId)>U(nv2e<87*hD<(En)sHdQyvc&{+4MhEH&;NeASvJ9R1PMmcDeg-{z7yerrK2 zF`YsTr70#r7-Sf8^=8G&6PO8vj3;a7`+lTX`VF7mv)_^g!K1xxVzl+es zyeH{Qn2O3)6&B99NiS~sZ=Oz=u~)voxd z#Ja($P}bL}Y{z3dQz1)M+Q}4gKB~4q(1m_Qr|GOONYfV>Y?xPaZx@1M7YCTu1S2)tA)o+Kj^4Zz@9rZar98u$gQN%Bd+YoVf*XOff$&biQFKdlfn!fd@y|1nEhRD zr8PA!V=E&rb=mOJR;+JQnAK=mN2a=~zeh#xVr}MhM`nFcw{HB!Nv|`XqGVdoN;b$< zG7Q1~!pO>9nfm!YeQoK`yQlemX8?0QlTrl;VWQo0Y$Z&spqH#gNDP|SoWBqEKZ?u8 zlhbhT!%uOX@q06Ct)CJ1pD8>BKRl`QMH%>#IFhg|uP5z9|J3dtHc67VBkM$kKSAU) zvhPerld#jh{JO}`;GnWx%%S`TYQuKL$-U??o^unj)ZnjHfM=}Rs$E8DpteB5KA+d^ zd;e1@d(*YKpEjD1AI?l$Cxwt9>dX;u9j-%lu`6hhc^6&NX#j+e?-;~}TJtu@S9Xhf z9h-lO^*?wmUGLsroAo-_Kc`*h$WsdzIn)MCT3_gv8VCl7IBcVVL-;I8w6;PFaWgUsqw>@=O6HkbX<%t!Qm|jNN0r4?E zFC4thTNv8cZz9-S(DYX!0l(!^C(ZRCED5_lq*GjhJlzSS8hWpEUTV(TNsPFgv@PSY zDbRT^Zm}5avS|4PzjfDki;l{>9!Kh9={piCE$?eF{n!lIlQW17% z-`T%@Ki>AtD|Uhfq3x`$szKK|XfvDskn8=%KNi7a0?rZ(;!F{w+Dhy%$rii(c@5CQ zrrWc|lCdoiWa0d-B4%!lu=Z(tk7CxLm+g92H6JRVK90M=?wOZEQsc(m8XEuDlqfHc zYW2%$KlDGUrT?YvFtW|~;8nv%4<`g@sZFrq{Cxi^PApQi{TGquzmJAh<*Ldf+Rk^Y zIq1?)a#ms!`*c&&UWKkM^@aOXezuSERvGTlFUqzGI#VmA>VLq{FOQZfgp}TWZ4+m% zuaJAY7HCl3mD(Baq5{Y&ZnTvi1<$FYYTr!Zv!Fuw2|KTPc0Wcy41Xt%sLzf9bE4R zt6RK(kNb2zsH5p;$-kFLTd%tsFQ7E(nhSq*jgQSm7a8x0orHX<2X5X}i-!>w9=9`^ ze~uTGy-U>}0lgvA5oZdlQUlO+3(LgF>q%rUA4T;WhDUQ zn=LX&|GLEqk>UA#TY)Xf}k8Rz}ag->;xg+9F@OAKk z4E<{ElUSp6U={^}6xll1krqb3WC2DQ7ek;iQrpsslV#>q^uTW!`!M&otp|Vt5$Q2o zgMM9d!DZbx8y`Z^0uz%RVurvWO@R2>Q}92>E-zEh5YL;vdW^^*Gk_&ciD0Au$9gdx z@}hKkl1xeMZpj8_*WWUlQk~4O5$u_0NCgRXFs;zg`hSZdr^kjAvU$~a!d@DK2KHfZ zOP0GM|9{@#i3b3_jS7VM`p_^)EB^ZV7_V9>alb|^(dY^TyM}&4)=4t;N^BuElh1Dn zQL-xSfd3J?&h&D|pB4eOGQ5btz;xoVpkf z)Of0`!p?P5IIvlGXWRY@B1z1>-!coVd?{KI0vO+7U-`;l|B;)^U0b`i?w8}VkP1AQ 
z!U1{-x}!~(?;0Yx2rGH_|5fHK1yG7G^+)N;|Jk8qZeR^8$^F&4)1NzB(ioBamYGti zQVrR(-6+$Dj$yDQzz%aICc)$Xeb3Rn@)NNT)_j;<$pxTs^JQ3t?f>f4wOk3{>vQW@ z=q1Oc=*QpA?Kcn|QNFP{ZXq z%U;%0kisSD`3eMy;Q|2e5WScIUsf6d7-k3q)a?~NsO}5NEyjn&CD?6^O*$LEip8<^ zr(!!_dS481zrJ|{NR9%}bZK62e}DJow37g04S3zUJ&II!Nhu4!U9A3Yd^|K%5aPqX zYNQRg@@cCSt_sU}dg%@D@eyFvT9IM?DJ-FWi;>Ize;+vP<7^FzkWIUVGVRK2tIfGx zbdO~4({AAc`4)tHeRcXnd@?7zhwfqTWGZWx6;u%}OnjzOd!L^B^m57aMik7ZrKwaK z7{9h3ktDV0pr&=I>r=!ED2AIqTUxM zh%Oig_$pnuS2HC6);@^%&`)fMBg^Z?Pitn0*nKz^`%MBoC)oHpQy4qkK$k3}18)*f z3T(*a;uWW3*}cwadq>G^^ko0^`KR6fum4$J0TZFnc^k{}RlQBL1dZlO;!A?qae2ri zUoF0>Z7!k7gur@D@B27g;73FN{^&4ZN1m2_aw>J~lT4#&;T!iAwa?t6-4h+Xy|YTw z1~JYfQ3$ud=TNONYNZKE~=Z}dzd05`_TDgCcSeXhNol?@AQT#H+s7@t(Cjm1q7@v>$K zy6^uTk8*25T$e%_u-L1yhi(+h-D!$)KN}lDaF|M- zi~MkM_|r&%K41+e*#S*TMV164Por`=fR?;EJLdkw8wC<7F8AEckqD> zh6Lmh5(WvaWmm(`7Kn1NLZ@j0!4u`3fWOT92w40#JG~Ea(A~xdkj}!+4jpCu_0OEU zwI6|s2jgLM6&dnN?;!#+DhGZ|j*silI{4fYy8q)4dRcID4Cc|j*1NZI6#gyT&ZkmP z=4e;Xi!&d&zjmrY-N__V&kDA#5|Wj@6*2l-=3ihQAvy>Uej-b;Qb=M}j9>QS4BG0G z9WQsJn%pWB{jFAM$&kZc>PPGNHm4NTWm(+GJ$m-9dog>~nbQ+GSLj4N^vu-svoWP^ z?616TG%$>&)XK)wVF~UdZ<-=&2S|MXCuU*j&*u4xXG3HkZv!a~3u&dU_U~by%*|t4 zm7W@*whjfYCJ9oltLPsj?1Wx^=-KB0M?#87?Y^7y19D(Kr_3Dw+S@gsYWRbzf989& zfkPi1;TEi~klHoH1f8E;ihEx*fQinX<>3IImnL+Cor-ot<{vj~aaIp>pZYfL^wY-J zZ#^Uptd7izs;#f$W|fwnFZp3u{Oo2p-#Qk0|G~!5N-^BpRdAg~P+)%h*h@FO+&}+) zoGn{$R`opQ$q6kbP}(2SY}jRn%pZT=3N4mB9LZdzFgLydT~vkK*<5g85z`#oaP^=O zxIc7N2O!HXYA7UPM?${^zRyzg$t+x%`u1HI zOIp@cN~H(u^`EX#Z)da-?8=<=R1A!i3G0bJ*Y_#%tDW6c*Q6J%u`=8)S+trc83K|!QFSFm5(g`xd54V-`LP~?PMVG6iT%ho?>(V2)PFX@# z{C@?hyAsL=esH6@>{XJ4^}|*2I{`j`>%H>(TnCSeJ7p{Hv!9D&1xJ9^akM) zSa~(1Wb7bi9C6lMgXe#G&wgfvCOI7N z*j$ZJMjT8@Ps#hev9n6OGIv}y)pNf5mWjopc0d%--taA8oq7C9tlJ=fi>?WZf=+iX zTj_P1`Y^Q2r$&a{-KJpakXn&14ywtMD^lVs4Mbt)t1JHI0MNzmim}Ks@q@upz|R$Z(6Bhs^(IaiUCm0$QirlE0)@B#%5xF3y5lM;yn5V* zSU1o&dTyUu>$rUbFqnMqptaQ|rEM~sDB_0DSkboIj`b^0#mU`uP=TF{Ep zv4wie1)?`GF>buBSuU51b|z8&tuw!z*R~1G*+;R^ 
zY6Pthwmi~Jb{#pMWhVy&3o`KQLz}WPT-d%>K5vt=LhfE`V$A+F*n%h^NC)1eeknz#5jAZsap8Xy9yB#MY#&g|0Q_f<#;{gdKScd zhmJ9xEr?y`@k+smpItFuyJ+|}$*Xu%AO~*U5&czVsp$j%`CI1t1=S4dQ8ADbBWA(r zN~tiFwd^p#6KSf(I~MJA#o#E(eTBw$`Nh%_*lJ^ow9_pOvAL3?{CjU%CQ`3RKX zWt@_H%H_nYefnD-7yQ<0n-}8^IkGTLd2-t9&)Ohg`@ven3yHPm-Q7QDrFv@Yc0M^! zV)bkWMLZDW&=?vafnRLE{kRb86IP$FEGNev{i26S0L8q7eU-z+MVG+Ar?OorW@^d)-Z=E+;7ArlQp6x?CtEIAF97GD z$WYioc)O8#`^?|mI(-aB>|-k_p@)@G09OnTS`L`~kG|mAR3UYwtv2zuOb%fybxm48p3{*k z0{&65_+-2eKX)MFnX+P@1W!Y2O~hv$!?K}*SAoe?y*5ie0bt4`0Kt}-H=FJS!>h|S z+}%?6Q}#yr`7S_hZ9FHT!A)L_a z(|u*Xer*fSe+F58ekE{beS`F7!BP)9L*Xk6Y8m8TQT%L<(sON%e?ccI7QvR(Aa;f6 zM+I5C8)oQb)IC*A&d$MeEV z01B4x?2>!?bD6V-LTZNuu8`V!7kLxN@Ss1_YoYSGapro!U!6@tFJzt1y(>Q2)rbm8 zEan+ZANv6y^r|diLTOa?EB6Kr&nF7Vb7-yv~fI9DquCPZ3j_K zkUps40}l?a@u>lTO<#e_;=ZFYQKYzh?fG)i5p+99sylfPY&e8cSOhBy5(~#97>X0+ zOuO_ckor-akui(gFE_xbjcPmfb0bzs72Xt#?a4g&?#3SUAG;20TOC?oUG-dZ$60A= zmuFBY#B(G-EYAB;&Fp5>KL%ZR%{h)potfTO>ACM|zW^|lg#dQJ8fkUX*Y&em@&3EI z1hCS(4K1fJ&Xo;jDZuk0?Ba#+y_C_@lqc0z5!Ie}2p~6>V~}wpTh8&hJ|QXHK7{+u zK=B;?bF^ zVXJmjHe2s403k=G8b8e~m?U?tYQxoMH>v8~ zIIjO&RUZ2>rNljRHSHTs!Byd*%q0rR;t-iKs{KY?`llF1XX*;(LTGG{4zPAx4X3W0 zZXZNFI4j>_cGYv?#%ex}CxLBv3jgCOJqK2+l$;+>#kuI?_AmV!WqU3Bj{wu5LGS)v z6R26&OyM)Ma~u5i9MP>#L;eX+d=uxIm}JX$c_jEj#x&TQzQr@vT7uCpxo4I9klrTU zbHH+e2GUU>w@v2~I~KNzToTdloj&e$8j`#{(lVUNbSX&d4HDxK|5q$~N650)ZPxNg zZVGAC7)^IILzztHGi>DI?zVjxGTacvZKn~{M(;UW!BXN>m42~xMaKN-p^aN4>GA*Y z*)GR!Va^?K~8awd~7Y;a`OJJYcfZ%sQCQ$eFztLEv+;k8O65ZX-$G z=BF(= z&-hCA^l$uwhy<1xI6iUg)iWDB=KirdKx(mj9L>GITRY)cH&UKv2f8pbmJ_v7-}uu*h~7E-CioglvtV!OH^<%>xZfwtF0Hi;?Ze<1T3 z6Ab)qt2@*J2Kpx>xuM#sv|Es`Xe{fv^s|G$4k9Q6R%V2&Gdl^H(_-!5`rp*P+nLhp zMAzWEIPKH%YS<){4)Qgw#TyV7UvXWA!KG>4KA{bs$Cb@oFQD01yfsvwbkwFgcf%xk zyQJR$Hzn+Jhvbeli_CT^8fUu5cHqpb8%=rj(w{tw_66!Xj}0?HcG9Dst`hu)%FSK} zqsG5Ac%-GhusxT43JSd}?&RupV4qHD)<5xDLeq zWgDVs{GNGkt2iVqhzF_h1iF950>nJa4bb`kN^bqivyK2pXrs4dCZee?G}>jv+a>F6 zPMPuWsnoSDVyTA;&(n|#h%qZagMQgMr*DKo3#b3!NHm=h>Hd~?;g`uQ*WxT6M92XG 
z5037DjUXJz(zos&;054LYWqRK+mgN+w)Ma1(W@kJ%r>%6m0x$xvd7Ny+p-*(HwUXy zU1k?fzn?N4F%a|N*7DsrMg|2u4TZVSNRJ9w-P+b%r>VTMd%}>E!*vwmAPP_*v{i5U z5&@3K#s;AteV3R*R9b1V4x&9jxTbM6^o^>z!ItVv-V$W1#Au|4c6<#6qdw z#=;aakE7$6wZClc`lnjrRFy?ueNesF*srng;TO8IEKcEibDM5`^R)2uV$CX7iS1V6 z1>a=Savl@-&IRa?>lpz(P*45>^_cJuwG$uGT8&XV+9cT&H{+SpI$2?{)Ine$s3D0k~w=Ywjcz3zPm7D>~pmi$d=AwC>74%(i8QB7Fm zS)I7BsRwYj8D8$&W8Q7K=`d0`C~Ak?7x){%rf1ZifT{A|miv!F;SRy)w=(}DaYa~b27Vs3$0=wO?{rOT}2{SEeibyEyZTZCgbE>a3d{S}JWfmJZ=F zyz0Q`6QnP}hwrL*L()!reoAtmoKiOE_#ab#5?m(zJUm$2SW%EUm0XGX;;kw0d5b(} z#5S`?Y9_>|N4GB=K|DTf70F1OCqm7(`S}J8%764*Ga-#x`%@kXq?rP0@#Gj1d=u#D zf!(=eC?)SWY-zzb7A}xwZji#|dUOtWyUV+oIUbkpFgi8W$P~)-CqS z@!txLeMa(YNb^93u+5}{m3?)YmA(IEZUpn~A|S6?u5x;<7*nTySad4o(_1kCslt9e zh1@kd!WBL?dXrOfta3EiwbTZNB9=I#Ra=hI>P-W4U!;_mU;QSsBwg7!hDhTtSy8{> zYe;OZPhv-6V`WWx$nx7Sp4%{!!u=KuHM1FEQVAiDu;A@=`SxnsBQ~~#R1b4!7#kKT z;S%&RzDZrBwIY{Y++ai}NCTR%HUS&@o0>fm?s?Hg`gQ$^6I(jWgM0TnV88!Zno;@_ z0OVJH_q0#@RP9NB3@ls0)6N|>)5A@Zi{nJ7eoo!bJy(f)84rpRB6b*J$-0Hhagw%i@m%YWB~do{_=*uzP=UFA8+L^jGOX% z_QCOT%Ott}Ag)Xa~j*<6AI@i1Hf9e})(yBb8D5bCe^j3Gm(<FL!yEE628tr3E>U1j*WFZGK}&4u>Mxlxf1kOT0=$&8xF{ z2Io0nwXF8!Z%A`i@#e!T|4#QRWl+F=19f^`QiP8a`e8sPIvHnWE~pq)yr?eh1)c_9*p9|9QXeGZn2@!V_o~%Vo?F zf4MzyNo>DW30VeOx773vjG^s|!Y$Lm2uT>AYzWC$fJ8a|3QqsQxXASpfLM6Aj5dnz z54DMP(R%03{H%PkclIkd$C5pLXvQDFif)F48)sHp?#tI)0Hrao02ck5@7v1--j~nR z$4LAP4abY9wZs|x(btZ}7i%y1lo&(GVYb1!9>Nu542$Y~=}5N5hmD0g^w$a($DAoU zDdt>WJp~8#e89xyk7Rvh5r0TQG1Br(L21^@F`|eJ{_g??~|tl2bQS zruF&FL12-9kH4G#FifcRDi+i3DCdsI7oRf+&`Ksu_};v2PYjFY1K z-bW;X4x97Hr12QmgGu&uTitr8FNgb)minn6kV&ttza)aKnD{x7@Pw6Jxj5S|k z2ObS<%;0_zZ1;^_1(}$=$i64pVu?96mGNlx1%orpXQ*YFg7|3QwwIu?h*Pa?sX?2w z<0ce=i<4j*>)18;O!XDdTT{mky^Sx`@u(gFce8JfZQ*nMN52O)U)Pqzzvh(-9$da)2C2ayDBd9ST3>ANkr%XC~+S`*@IENr&1 zh1sP7@AdAczmFe!^}{l6AZ>OJ3g6!r-7LxcEmLc@bG~UL?Raop&Jo!SH4FB@`e3W{ zAVXYa5ja5Cbbsrfq+8pj?^K?RpD3?LZu}xWK6^;h{CM{M+m##o*Sq!I><<9f8QC8C z%;2Wt=0?t3KI+nbpo~&)TZ-P&IWUmT(Z9U1<#&l*=R@*LW7MlL$BBUba{!nleOKj4 
zd!GZjDj$~IXbSn`mW@rk$*Px8o=97Kx)u#~t7R%}2I~P9*!9)44Mv;(M7N;o%g|Uw z)vv6-#@zgnPw-Mjc<{=!=^_^gMut0hk08rNTD|ek-)E)LfIv~m8c2U8`$^5>z>@c| z*S0I$NfsL${Wcf}eWnPnDn6Or8IzU2W>BQ92mZ%XX>OHbHqJ!<8K)ei(pnaChl+Um zzp>&Ff<vg|b2M7fx?EDq;B;lsvNm5ToZRiD}`v+L;i!`L*egQPU8?%B11UfYluF}k-! z2m6*M*M0TL5zXiO<S8FjIq}qJ%uSzc|Jq#l^=rK@S<_>unR9pQ;(yCrzz(_?exaT4sTAwJ zrt4qt)Qxivc+&iNvNpM^K7NDhmABHG6~oEl9th7UaY0IEl$u8_CG#m#V;+#QM>hhnEk7Q3HyIiL}VChY>Fm zNIRMfG@y?=&K0|#pUtrqPPIvP+Mp%p(_H3&2ABsLHXR>to!Xr+a$|VX5Ooaq*Q)d2 z)p}H4;TjIi&BCk94VBP7x0wG?FY4Dda2W|I#ELFZuhNCTY%pU>Qp$e=<+P{KpFP?| zPs75H?fq5^3~uxrVLR2YxwRoTD(gB(yrI?jl_5LQn!7^kpLeTzIWES`H#~QN#`fZm zkD0HD9F%Uv#-g$HDuOyYcZ_zq-4qY*MMoS;TCP|fcBj^*c#VqnleLw%-zeK8JZU9162zKgBakkT3 zc!%-SuV03$ECFw_RQrBd)6_Te_K=WIokrR9-Xg-waU{`>a%Ma_LIV{4?!E?HQAu-w zDb-_op9`OP_OWNjsn}4+IdLm;)!n$%dnx+@^bJr?-h ztn+CM<}~q*Fan&^+Cr4in^_zzzDTGq_cL&ul=$gNB3hfYrO?OypT`bxMq^6%c}Q4? z#)1V|we?v-_?;}i1q-ie5078C>x%&*;7l@s7*iON-WyU5X7@c?+>*znjw)eEkf!GcO!=T-N*8zX9UH9-$r@&6Gh&EN;zkWP_4ccI1e z(Iy5t!yuwj;98n`f2MXudMKJ~?0G==-kcX7E3yw!HC5AR213hPv!>k$+TR<||M5J;(f>%5p>f#gSw{**c%8oYK|H>2p$o+q z)wL}A_8B^s3uJ4JLF1XL)P7>4*T?QDvu!+#N;IMq;^`-qQ5VKoK~BT7YpJgm?HF)mHMzk|csn5iom(0Y9NDg<@D2>KL63|_Tm)y5jW(+jCu)*Bj%dhuf(={=+102G7cH5wQn8VnvAfJ|poED^89 z*C4pkoAyVpJtdw;{1l{+>SP3a;LmP)dRUrQ9dpXw={Qkw!XVA%_50dd8{tr6^-#v6 z!J0o()-B$CS7O~v?>hYxd-6Qo#U=Uz+|_2Xe=fadfs&W`(;a`}(EM&_KAK1_3&Pel z$}*CGXefG(!Bp)ziG3&aAZYWCR9oH&wO{_qn)JAA5!F*LW~tKkn=ANY9g zOSXDVgK|dhNAV=lOlWU;atE2Rf5U8bl3*yj`3-PlPQiNEz?>|DJQIIf$**H?OZA(N zUAxJ@sUg9|S)rR_Beb72NtYo%X`Ej%Hm>Lze&Vks)=DlYfITp07>Qx6Rpn;z?bD}C ziz+@|`P$*5{&`iho`6Wn-W!|#qWGLi5N=o#F2T};6U3ci4n!@;Zgu}SxbLTr9< z`%+Yx*On#UhmQ0_LAmH)P-t<7Y>g^*JL5)k`hh)eaR-=4#}S%a#{_1)sADRx8CEND z2neP}X%+C|4i$|Z>$)dzN+nJ=S9kk2Uf>w)d zCjJw7-ifK{Sp`kMuR0WxcC*42?U2$jUG9=(nHQ7t*6~Nlq=x?HU6?y9Gjhrc*W)iz zFJx)k!{yLqP!;5LoX)a2%3*p1fF$qz{b{7ix>L;)nAY8c)UEfW0Hx@+TC={0#U1q2 z1>T0hS+Eb&8U#^Ygzf`2)^Mc{{kaL&EWJt^{J8@km~7U+z*^B(rC5jAKm&hqn!OZ$ zeLNki=lY?GBJKN8<{rdDVi`a8lPcaW>GLu!?8I0t;g5Iv9(mry(g(`hO#-f5$IBL# 
zutrw4=yV;wbiuUC`=iqFh4Xf6T>!RvikJGvcE@|~N{!$2@`()L(1EDMKT`8b-hi^^ zW)aot;HT{7X3ROszER0C)S_X9uyKsB2K{i9&?$c6Jtg!-ldYkP5N+NAI?K}m`UC3~ zUmA^!#BM@A;=e*~D&j_`renLH^eIxq-+d@Vk;1G`cq$>hWEeDXRzoE@>(Wz2T? zoQP%30KBeG198Grl4=n)MLc=%kJJ^OIhrbc<;)6oeB^NWt)r(1q(>O?2f>Y;|A~~p z>f+`A+>LaXA-E44`n--M&g!V8g&h~J3aG1)8e}9QLwkbpQR|xwY>o=#CrcV${ii=x z&_0Jn6TSXOi9@;yJ=D8Uk3o4?^LqQc(FK0y=uHfhBT5JjgI*IAt4bC`(6^<|ixAwv zkb{`Q!<_aoGLHrl_9xmUvCa|@N#IoTUhfxRGvJjwySl=yDCZlc zd6A3{rciPL|F34XH(Kf%uDt=26|p`~YmPNA=WwQt5-%cjbVg!w3QSmOV3$CX3r`TC zq=FA1imAkriTF=&9KmSgJ@Yz{0O#{aAtXGIV*Hz9<`Hq<)CJHh>x-l{wMv%_uC`$~ zO5%gX|LKt@l-@JY>wt55nZF^nT%2nn(y;3y_CgM4f@>x~LkBmYCS5lGxUg6p!JHeI ztX-A-E=@!O)KdbfHbcY#NRt#`6$PZw&i$9lE%wFX!4J#a1|stM5Z|kE*MT1}xF}_+ zbi0_OeK(5DdjM@;AiMo+(b%Tb%SB`2MT$0#L>QC3^Vovpt&GhGaTG9vwL#ZhzI#+K z7Jl}fejv8TI5t?!C`bx1bjBl;sZ0zNaF`Ev@OG+J_hPOM&HdJU4F3RP;A^jhq;SlF zOIKyVc81$_-3=$tEt)Y&c&|-bAC@b3W>gP6mgM09v~e(3Q)B=cGa*F5-Zc(!1w`V4 zsKCfpDBRdzt=<^NHpTN%DYiReJ@*dIMCu{tGs4C#7dXGueYjPF`{)Q|9f|y+SA=9B^isoJ?ESPfXr4uAO-i z)9}iS3@EKdT>2#5Ir^9t9K>0h5|Oh@uiu#KmM&q*B!z(Wi_ZIHgq5_5wF>Z8*Dw%s zc@C$bOCb)TEH1$KU#xwkKLG#!LjWc40!^Qb(aD2>a=}p(!67`g3H^+DO)b{k z)pi+c@b|(Ks$CA8yAFa{QAdeU*--Z0v7$%*{Afu(P8#eWw1~dXi(zGSW(N1BRL~Um z1*$pdwm=siN#T}~VSqCX>xDF;H#nAjoos&_%1>#U#k>q9S7YvgSPx)Jr}L*Kky_k( zmU1c}P>~#lfEy^w5zAt1Q5FJ6QW9Yoy!NJ#&a4Iab+m-`MQ&;k$<-(;g%<(T?+c{e z1Bj|qzuJAS6DqHqV0!8kn%!&Cfg4kGeh*y_)LmJJ>G>nY^>9)NDVixKF4dJT-JF5( zXQ8XGh6QNiwa+s7=^+T3*)B>))zK_ zyMpSWRI%rV7zno@NPzTJ*jOQrt=XSp7JBs9yd|+6OJUQik@>k8Bk=~shM~aUj4fcDG`*cxW8lq4{QGyzsJ-wph!TC?-0;B$YZNX5UpkFPnDzR0#{=&yp?>MSFhlSoGFo(VTUK_%Feqxz;#$s+K7zgva&*0eG>Xg<8|pZC6>Pv^H|SFpB8GFpRK=@FQ|T-lMMHb@$7I!Eh~;6^f2-v12ew1Q%zfeZe(5+E;A zi#8X3T#3HM_U8uF_e+#TUIQfVo1s^sApokj9rv8GB?q9Fg-tRv(Z8||`4kawRcNKLdKI1lLx zqdM1qiFUE$=D_;wZ361j(ATkX5bt8sMQgi)gK}4&e3Qc7JZHrHzzQ3;VH{En>rtXp zH$+5piB*pX)Eli#8tkc?`m0=I{d}{W*L>tz2hFG*uG9IM#cR^36@IU59>`VI(Pyr1 ztMflh%c`CS;t^FefYj}`uQ%F?k8o_Onj<5E@sz2G`2wzObC6vsn{k|c?a6?Ev!C%8 
zdPOXKeaG07CuwG6H1q;IlUO+(?xv7%t}pz40XVd!WpgEnqYb0n8VnxdQZ#S8lPQbz z=c68=bod2bO7>W#p{^*{IQ>4)8J$sGQdLvau(CRjR;5!1%cFx6e$;1_lW0KU36!(v zd-)J2AT;=EZK(OE^wI6AG6%E7x{!!? z`Hxg|$C^E2KyqR6GQQ_4X!p989GL~a(8d!vNt=B~z}J zbw31(+-p;fk>%yKdFcm37|S(OV`Xa?`|o*kOh?rpsZPl0F>Ny?uE`Qkc#GFayN&l} zsc>U{Rgdj|=zcBOcP7|}suqXCHs%LfnsseM$s9}y7B3E!IvJmQR%U(+myr|NXP)w4 zI~nZUgHyx!Pi+y__tB1-o&r#S7Vfy2(_OxJH68?ksg+1_-MT&Zc|OS2)c9Q4o?cxkcmJ_ z-`v~z%-#opT<3Iz(}}57q=)_xx}>3i1LvmWgk?H6Zt&irqH_ynH(w? zw2<|13G-#a%aQfHKiIZyYE--T4TG5@wb{)Pn@7^;a3i%{qofvcf*@n5mJRFBEp-iC zXHJdO2s%Wgu?)34(DlDfO~0w9U7jc8{}j3UO>>8;ZY6d1kwm_5ALGX6AX7$<# zV+Y;TDP8AKOYM6a`)ezMFONCU6^DB1li5z216nEDW+L8jk_T6?dFj?E-`1Aet*ARWp_t;uKqU z8B>==Ml31tA)J+L?H-s(G3@!*>L+-(+>tjkX_!@gnscoqx)gn!exvu#yk`u4F6v;TfhPz9d| zFiLoYyld^OF-T@gOsBJIUzmoU`d(3$3*Q0X^}+>L>MC{@zo|}W_jI+D?N9qPIySnD zxdGiyyhGd{ePZx#@WZ-1wo@!BG5zsVbe|=X0XmNadejV9#F^Vzwr#t#TmRMaiC}0@ zO~Csm z@jz$8--8F$1ic8w)v5Sv6n-%d^lWpV}W7N}}Vf8h{S~ zubO9e2C%f3mbjr`d3e(9MRcn6!P@Gbsg+AuL!@<=tFNHU8HaM-H^_Ezsvf)Qq&6|J zfjh|j7wyU>UKBo1?OL5`x$0If>0C}!7|8=zWh=S`sX3GE%y+@7 z>>&8Co6!Dx(6AaIgIwS>42oBhWWuXw&EYA&AzK5T$~^)a$g!O5{;ylC?uI>Si%F5U ztlOI94@knm__2^_|BjH6MEbVMf?YcALA>)u8t3IyoMo2%9a-`fMW7|@wejB?k?Qi7 zFc}eW#kpZvo^!u@d*rz9iy1IQBe}=GqtL(Kj+P$u(%U!T+x@qRg+Ar=+YA(A+Pw+I z8mHbi>1?L?Z{mN_O70lk>qU)jzcCe=q)2uE8Fh`P*!p_kPSxY( z=K1;h25tWJPV&ruI_KZM^Qkbik503dOKkVSO9m6$PoAuXyStPC_V75a-IW%*`SG)} zm+SQ`vOu$27#l$Hw9w*?WCxp&T?P-VDPJ5d%_eb?UtgyJ6~v3DpK#0;b^0#vzX>b{ z;v$dJw?1Nu)>CwBX<d%Q{IY)dRGfrX@NZ`I#vvy0tYz z@*AcWdoR#bp55CB>fJfjmL7Gqs=3-#LY8D_X%I$a3d#2M-|*|n;!luWlXsw13SHU8 z?Y6>8j9AFA=(DWSYLbYU6w>z`4bvJAP-rhr$z zr|k)@$uyD4Zl56_D`Ez4Pdtz%%V{TE>$C)hc8}5v8#k`aVKhy2K#VJq>_PVu<>+is zc6b0KATB|Yjcbg06Wt8WOn5tI_hn8^y45+{A7 zP0UxZ^DfF_!4k-N<#-Q|qQlm=WbaMr4TT&5S_55gAGI@zYX?NSUOGxI+Oz?c9jC;8 zkJt~clqdOG^55vQBKkM)I?rBSecq(_*&eLJzCI6BcmR!gxK}hXND4aRUl0nQC406` zb1bml*dHNJRRr;IV3>SerV*zg0jyQ%%5)3kG}jbz*9(3NQq{WNbuU?kCOw=d=KYZx z)2HE|gJCQ$p=(WSK7nPG>pkOsku30}{o?CnZAse#p<4#Ac5?NX;0d*vuRVSXNCG=5 
zj4#7$2T(bXq=?CCO!&eE?VRUdy6OZ1o&2klc3NNVxKW&-xE(NKd>Vfx7;=8mM$7y9 zDk0d*1U>@R@3^7eKCYe9TS8S$_4y-JZgA-Z#!b+-7f`gF^wNp873AyqhuOI@nrE@+ba&Y~N0j!A+agyyJhs>Z08m9kQlPx34e5duAb*VA8 ze;P^ab2FZmt#0Ui1d>WWDDbzcGqjUrQ@bp`l}R+TT+=ym&V)aexkoybbVx#(5)PB678DO?sKZ&16ey79|31f$nLyl<1K#q%=%~q>0nqZfGxq9 z%c)e$LHd(_PA8nP$a+P=tIKCw40@}ZczHMco>*U&pky4VO8i0fM<-sR-dw589r2QQ zu8!P8BBSq}*bR3nD*o3~sb)m~a8v0l`eJk-+v5&GlOJD?+ID*4q)FNFt%6&T=6T+dxGUSxXZ-O!2rqxhBL znTdg%qQ2E|l%>fp9h_|rrIl>lbtSC1wg!-HAm5icvG+R9_nYv4Z(0VzWCEz^ppZnT z#9ruHiw7c$4jRb$R9UhftSI4aQIXO9iEc_=$cdloxu>^!uu*u%Xx!COLB>)BbC>;! z>{ZPi8g+b=@=or@S2WA_!zw>qLm0`yCw8?f#(o5q`ARA|Nmr0jG|Z@{&~T!vrm=u0 zGS=Vqya!soG@h}EuK8^qI;G7L3~k1*+zrO< z9ovP2LuHr}1b_aWYvsK%tPaU*h;H z>4yeb&eU8l2B68U%cwB8B~coGs>@0hb5sj3Pj zZxlPFXba7baFXmMwUx#^xm3I8rX~i=sqxi6ccPno??oxtpNjNPigL8{I<|cHMVdnu zJt*UYjZN`Qc0gSLr5?KMJr^}280=`8kARHXXGm*{iT6PhSxW2{}Az7F<1m_xSQ z25IiIp!PV-jMumnIppp@Xa!31gUD@((Nx+@ zwWf!uDv6@~K_s(~W{3Lwb6{}6SfWj7WGHbz>1vD!yMOD3Wu0k^LFtu(mB1#eUcm6n z;xCNZg{;-LcbawSCEvTN3oo1S^!7T@9sbJ*KQ7t?Te$qXnF5`-FxSn?$!_K^kYIuI z$%DnI^75h70u3mnMXd8X+4o9{9eoK6A2nz9S)tfGe`0@Sn8II^m+t7zJhxTx0Qr&& z7y{p;z$SCxI3-Wr4oorN(ndnoM^+cgJs97EO^d5s^H0aLr*|o#qWfVeU zDak?wZOFEt?k;lpx4quHRPW(Z^QsQ~QeHN`{`a*wbEBg58EP8wLX?$U@ZmGve8d2V znv_Dc51Q1~HSAvU-F8m6CNB?#Q!5rvdmRr@pK)oLoccM-sI?#GKsZZopy7)Mam~y> z?L?^u@>1FwJiv;g#K-Km>An8ty`AxL{6z7>C;Mf~6{LqTs8?KGK|Y4sx*@6fPC@NVzL*2Hd6qU@CWl_? 
z{~2;OLIcxh4$MHNAd`EXexmOwqiFLh3XJ`nn^Jnx(zLI5ZE&oB#!Fw)N*RJqbY+v^ zp}d@_X@kDxs?C1}mI{2O626GkYNSvilRvX#YR|^MH&KxoOZD}@} zI;Md#BM}*OMJ`y*)3T#AZ_W4nX?`D?AWT@4lDiSF5vemxPG=ecW9sOB$M`EN-G?W* z)noU>y1vRkzI`*;Y`n+p+95`v#Km>LR+60fFQ1&5AFriJ>|qFg=y%dchCk5L*o^Q^d8@82Z)vu%i9gUMBpwEK zaA+u8Qa|5(RN<(WY{eg`DN8}9L<7+WSQu_2G5*){(~I3U146zfbS|OP|5`(zOgT6h4axlP1BK^N}an3XFR07+Vrii zMTPQ)R$hUxuSaRGTKt6}ZOA;CBv|Z;Ere}SS8p{e5I@z_LZnsx`(E5|Bpjbg$T+>g z_Az*$6biME z&~Pm2KRu$*H-Q}#Wj2~y5IDLh40zZes%hpzB0uFGE0XjI{fddW1pstl*e*LGi0S{x zj3by>pcl?7aJhN-y*Th@_!1%qqrme^NHM~?phmQ|@UW9EbPzot-ec*bAboW}kLy|w$@@yAC@_!>KG0dHa zB{t~8dsC+@n(Jk9P?6|pYShdwUWwrjKXuEl;EE*0sU(3oazmr|F{LWm>KMH#%FL(c z{n1GWKw+num833oUdTGo@{4qn?ec>u&571DS)6k0oWXNKio}kKQ{ra}>&njR=52JQ zPk^^`Eyl5}#4cM>0%*jertC*`G`ne*_G2A~rip)~TxF#Mdl6j*i!%0v?BYUCeOS#| zbg$tseZRVpA(M~kHu{d#IUiLl3^{6c_TJ~+Qs6L&=OvV$!1$eCUUkuh-Ot?_1?R1y zUXsTt1z=~IM+98^Tiq*_$KMdVrbq6aHyqDkU_&u#J~Gk_C{@S)%)%Pk3U`R!iJm&F?! zbj|{H(F}j?eQWJsGuL^iqtYfBm+`HYVJbR3u8vs6T@lI}dp~WZ zI0z_Z8Gcc4$*!7a&n9eh`)!`OocRL*rNdQsFFZ`hhh%O*A_u6~pN{>&_Az+Y>KhDb zfkseYNSMvXRKJWpwv1O}L_QPCAdhXTZF|2j!Vb*8h%iY_N)ek2(CXA(GoYvCOyx67 zvk$9y__T1__ z!>=~Nb}X!i`Pe)XZSAcIgfBPGqHOt*EYK-B8v?z=m@M3Gs)l|@t zM=F2S)hTbPd-k2Br)2pr0A@h06nSg+euf?CgCRnY73Urg%!8I7YN5oOY~K)Ao1Rozinr&sP9>|J4=C z2L@fpK5HdlJJ@vS1MkA6n)06;iME$NbrQyqriW$aV<<^NpXR^D**{!_)}Rc`M>3T? zL(D&;uyZZC*Y7R(5md>oxLv#F4JveJIeuSh?37%1f?z0nr5enqEf~~y~0Ho#z~7|<^0kR?zlw<4RI~FR?_nV z>0v<(DfWKTFe&jD3{Fd|6WH2q_x8}$ouG2?134^-ic(??Zutr?z(QxFHZBU%5R9jm z(wi-WR#$svp+xnphpNn!P_hH3*CV6L{L#nkTAMSeT?XCDyeHIJj~T+&fu;*ln9@nZ z2fNKAn#~q_u#y_fCg6q*_*m-Lh^`j`tw>t7olW}`jTGnZ&ve#}{r%VFi z=rW25`6yI^?YPhS2vd~3?$R0C?k+p(UbeFA-ppOd!~O!sH!tSWFOlcRpj~0I+?V5S zs86bl<&|@rvkAb?>X$?%`fuLYG)j-V-ma-W@TsYqU*K}mZba}3=W)~I-WyT1$dU!^ zggxIJN}ML{LME)3Nfv5rnx!;z*$l0xsCxV;dE~-XaXxythud$70W1HbNnDw=R4Yn? 
z&3Vh!wv?9-N-I0?0;z_jF3i1+rs{UUnJyk<+cVMtA34Bq($Lkh?u#2D%wtKq$Da5c z=@Y$3Oh>f5O_zsZg@VGH3(xPryB+YN-STdE&jD4ilbY@Lylv)CWux^_kZ^DGv7cp~ ztMNVqhBk**y}ul=KewS_F4WBC;qOqQp$OX8<)x2kDJ?*B>h{n$MzYBE z?xIOVi0<_0mLEOc0L^@1L#gXGE`&pKCf`0%bK%6 z(#~QG)wyTiSQ0N0_~e5BpeL;_y_mvis5-vY{9#?lp>OEsmp_3%LBN#$Lw|_4MgaUK=oCPNN z1351^(EQDO1^T4!t=*$63r0>8{c$Lj_1kP^qgk13Q;p31w|@&iGAEu|u!zL3Yb{)& z-@DZHPJ9(|;Y|^60`SE?6?VlwIeU3G${VM4<-d*h*G;R0Fn2kN;cfkI0tr*9a1Y}H zpWHlaKQ%g!5XuA~%Ro0T{H}R-i@YRE&+Ql)(@o9nZc&|4=1}sHrkEa$^_K=D+VZzm zxczwRR(6m2%MNd8@YB;g)PLzn?clTQ^LAw^;NeD?3RvO8BKmXafb+VOb5@VW$_Yp- zDLks(z1jUMjL|Ssg?ePu;TQ@U2cP)plw3jylKbB9ory}beK`3(x3}{Pe^XOogRY5# zW3J54QS^?iYd_z!hrEldHC0QvAIkiu*&aY6Z(K+vj+w0v1ApRP$R8wSLiAsQg!Z&jp0+`?qWR5C2*U_5u}} zFY}9v-XT0f%vzhVX=Fg{5oB$G`MQTjU$)`i{bg1*EdoqV&WMP>XXon}%rCw>LJR6ufRno@YuW(|r~{;yu~RGlL-Pu;w-!M>Nk{dK3x zO|*sW+XiK&1g%eA+7oO6AnuoAc0X*XHltephsXWi2{FP>Z@03$4;UTVFobb?)6Qpqb;C8XR#mP6 zdJD6-Go|czP7Xj%US>cosHjO#QL)&qYDi7I8M(kMwcoDwPa#4JP<03}SN}+z2u7=% zu;Fa{6e(Yuuc@h+ovN~aTDAHdkV>NsklP#__7exs9G7|>L0r623mGBtLD*_cCVqK= z1>og0?6M!5(Hi;e+9f?KTfcKnp>iWg1MXnnm2PGJN9x;I6QJx@N)3p_#Q2%)tD{GI zPQM$pb8KgADtiZ9fmvUgch&PVM+m*->oBQx0^v!59bF5nuaX>S_wWD*7t4h)ZGfn0UUj&9nk=7B0Yp{*zNJN3V|7 z(hPYsAjM&5h=+4KpRUnBo%~NTw!<4*Ez{OM(sa%{DW|T*@{g4Hx3A!f$K1fMY5Nd-z}r2M@A`4C}oPoRRZmA(xE$2Ge?i6L3+_yv^#Gs5xX&| z^S2(N$|9b*L4CCo_};HFeq-HPM+&Q11nkzj^-DDepjSB?3pi%xaxqq7bqiDgV*nwP9k ziC04k@XS@WYu_>Y9qXb$Qh<{L#}ANT8l4i?W7_lG_eZk|XEOnVtYVDbUZU@>`?mlCp$TM!Z54!;L3C238ppv@|gbDHz5 z&ET&uV1I(Bi#pw1*6GP`E0nTwR?v;lLSo%|v%O*w@6{Db-CByCo@KeY0%;8h_bL#X zc+Xu@0FaSW202r7uPxI-E*#kIK=X_!%*GuB&T1E8VFO>hVR2366$ymplPK$q9L+s# z-}C<|JBnEYdT78a(B9nG_WqVA_b6Gtp+ycRhj3>>cZP{A;4Fwk7wrOXVAjb0{rjKs z1G$=TV{*$0pCi}p0m>}SPZ)EzOp7J ztUxZRZq?+|`Tm*11OEVGi1h|(5<>!Gk^||p8KVDOMl|4-4YL-1!mSdE)s>ogQ6`nJ z{=S6*=A&kXTG6>_%vQ+&=x>n?3pVq2HlZ+=mwQ3E9QDz(mae72;{aMPRuE53Tmtpe zfN?L1KZ6}v#a;gbi9id2g3*V%SC~&qkeGaIQPd_Ky8s`zZYA&Uz4`LIpJrGxdBxJo z+&5;G|8SN{g0tMP!*QTdG8Z@}vArzjRe%YJEoQabo2U0g!8gRd?euKlD;o0mO>U;4 
zzAf#X>n?c2?0)$_;2wg;YT2{M?U*0|As6{X~PhPNz%hk-(G8{3DgxdwuB0z^B2G zLxbQyQZ?=qsLaJtdXAXCh`^!We!10PT=UBIkCc7A*a8j3Dy{t6I*@n%4JdGyX%u8> z{0p`G2wHPzs6bs;e^Q9?j+Ok^3cGJgTzHP)ykvwy{b1SP+>en5vSulCP?AVa2o{|NLJyOpBf_}uukM{OZ2Su|6H;UX zzO_xe?6UO!!`Qv7s#BfWSAP3YY-%L^4Vq1BQ0XU3vX_nkn3XYTqW5oI^nP9ecaRsK zYX{gS7@fKogizY^fhCq9dCS6q$AgARvrlF&Jb7t;)?gg;UF6SJ(PLFU;U%UR94_uT z)DB%LsbGo=J9x#ULv5x__6L~zZ&A8@BxPeR9>&E)W&aMOmn^LWjY*OiVYnV;V}KXm zv#PQ7g`l)T$HQ33hkasoCt4yQO;vV15$a3&pt~U3c_<$w_yDaGYTwWVjdC6^HSa1K zF+1}9q{5`dV@UPJ-Pw?X0)1wVDWm$T^-E`&q_lIIG1#6YS+r6IlHU0e84AnU>{VT} zj37yaqkMBLbtLG}k{(mK{jpxt_%k&Y5I{hql<8AiqYZo|RL+S5F{&4Y7a)B)jza_k zqs|$G?k3O+spo0z#Yz9(eRwI&Rnv%_ou2Gxc>AVB7SQ(WD+udF>q>^Ec0kl5RG>>Z zPVx`Wt;eCR;=P*ob3I+mjCbx-7N&!I*^B{+*NRE)L}{>LBxiyyA!*ce@9 zAvZXrDA*!@Rh&%2#Sf#ES^#d5ZuFx#FV~G0$zpmm9_4QlUafnT}Sc zPlfG{%tsMEje`8ZRY_0CBbfLN<|~l$32H#AQVg@)nQFQnDlZpz?*aQBfM z$15hn@tT|rrmC(aHdozd9v_aCVUpv8(r zU*$O)g>RAvFmAP|ePin!MlBixED%clyBSd!dr3C)s;+6k6+xrgIZgfp(+Yv&!>sT&X#A9tL>_tE2nA97h-))W|LMiLCuuqP=*gmmX0FVB zIxxO3n8J-{DXOf-cG(bBJ9D$I5w{OOI!}5X_jB>^(%&};h>J>_j9uy-sB$lWE^BD9 zHzycaaNnn@_~mG@tYe#OI^N3K2c^%b?Dj)`F6CrA>wM6p<-G9BH3)gDDo_x89Ai#6 z%4;tT%;+cI>yed31R6}KPv`K-8m5f?x@6nV$$!|cl|NSsUZk%?xg%!Jn(VgGc)XRh z?R1hFuy7qU;D)iuty7wd5|z4seU2?M(L!4esQP+*DC5R%lf(K_%o3#G<~Q_o~sDbf6N z!1~5YdB;(H7HhPM9jy;$(vRv*Ir%docwhvSv-5B<8^Pf)b{8(B1iny{nKY?75DWuE zq*dGQE9~FDg!rcQ6ZhsW#vhu1k&zgPjN>~sRnt9BBB^baiOjnbE}n{ow~lFj2qt>H zT?rDL_$fJqywJt<+TC0}cqrWyexB9bTy-i;ig&vurCq0+`E$#vr`;qrf04Ws9*|y} z)?p(Ty2ZD9AsG0>$K6jq~Bs4lp5=_3M9UgsO{N{@mL^=*pgxmhrfv7vexoWTE9Wk`05npP(=lQSWJ`Wqk&sg9tm<-a*$K(J`?)OD?mK4SQ zCyE6yfYN!ld6k{JFDY;@J)1=5g#twn_iAo#Fv-(V@)J6A_*O}O&OR1CZeumxoQvl| zcON(Y2{ZZ$B%c1-CPlV9yB~-x#7493-d{8?eKXhbGC$k*UYP2NMd~n{DGn0%v9Lfa zF8kGEUikAmka{ujrkIPkt!>x_1iOlhUpCpR4L2G414!s1e%Eb6rC%ITSlqFg!K=>X zg}l2zACh%D%Ow-&QLx~ zRGs_aI$EJ2|G*x0c8By~VQ4QpNHRqH3V#eFTH*d0|E;#%oRU4$duEg=*-hNP3sW;0 z5S}$1C~Z1eb@TGXhn(%xGlVJLo0$-vjVPpK@!zYJ3vJVGt$KZ!O7aELIV{O^q0dx! 
z{Kn-QY3CmU?QmH*bo&$BJ>Z37FlB?^$8;1pBMWA%rRM9hEIyK4GG5QWy`6=yJ@PU3 z`{dCD34mQBA+4hg@v)sZ**X_LW8laDcks06{j+u1Z90lide=%ScfGKSzLx%v{$5ty zo1(NDoyEG6qgqVBxqaZBjU|;eD72QefYc#7c(1Oap~-4RD)5t^~(Ul>YSowp9wz!hf<=qO`Wzg$JTt9od_m z@U0?mmdq0d&yTJzgT!B8N0$fu-4|aH=myL)>oWm|v$w`{t-dyq(YA!vf=>wUu%Ifu zi1&2xP13BZJO+qujo+nS;jI~vKwevNAn2&H^E__c0?v(hfA6V?oFlaRsh#j$>RtzA zS;HZVN!SIWH~l{rXqA&ny=NMR$29}l5&7qUglk!!b)#5-7My=)e;i1+gO|>{iu7jN z?i_Uka>YChF~H=H$FTmzjoH#RlQuB&chHB~k4kc6QP8VwKU@kB=}Wid(VeA*E`Ldp zdvdHyEy)`O!c&;mi(nNt;ZC#;oo~4MHBMNx<5_s_ePoQXvX+*S z0MtU6&=iJmzSy>K6JWpM8FH9 zW7Z#wtfF;Vj%;2s?|Mj5iUuX=kLGSCMC129@4O#_7oWNhJWW1XI4$jt9&r0?*mVZt zeA_w7feK>)q%ODCYu!pn#4>I}Uew}cd>djqOwc0;?Tf1u$>$q%16tlS=$RfLun4l#jX8hwrm?)8w$+UT;=2KixCdAfxFe$^A{Yr$eQ840S}opi zq#4F|+9Z8ov!$xO;()EIZ0+@-KCulmkb0~|Yy>&bQ+&6)STTV@ZNEQi10N2`zU$gn za$0e*=Lq@>!_}E6+9&EO9}(|-iZG$c`rQ7y@9kTaA%!+CX%+Bt<+zR^-H!NOGt(7K zZV}v98&vv}iWZ_A^Sg$1h&ppj%xwXMiK*=(&RGkwn+6Qhp6H2=x<4zoA$c>U*4w5u zDH>V{lu>&Jc+FrFiflR?KAQMJlI7KEMyPsrXFi1f@}aS;mNGCOqU!q-3y3_MevIe|N3Ux=ecy@5V@ddxK%E5thD0Sw7|hR0pXoNLQ# zf=5~z<&!<~je~A(@V<~SmIvOF_U(B{8Q+h&|LXgGlbzB-YO275`A2WuTjBG@h9|>; z-pW|>a1KR@iED#Mhqpoq0UO!pvV{qUPsH`?CoWH&U8LjJCBV1s)mYmwR+CkhsBOkD z{^qd~#AXcWr0+k}KwVvP?AA6J92ZGuao{vh1D`528@VV`=n~u|>KW zIxDg~;U4qVvdjHmBp}%^@oU<4*Bf3XGZtciOdzUQP}J~<_3WesA*PIw4&rnpRB#&u zo|J3BDZ+Jdt=F7A9arZjLes9SE&lQ2(d=*PxfoK0vGg6=BP3CL%xI}%ny%`1sLVWJ zhZ1+f7ywD~Jflu`giPgn*SMp=KyqWG(ePw6kU)8{t8nyM(2=ihTNa_@W&CsDy5M(S z3cn)UK%Al+*G6#VDwxAAZC|keir9pY9CIVAP18-X?}jP=Bv_t7oBa3 zQv_~lWc^R^l%%RbewHK~oe59rdZV-Gw=wR110S$`_|Y@5srqDEc$X}naxKFzV#+kY zqBkP_#!dfs#Ot!4Pz zqXYg3>tEe_$##Yb6F5Ks)WB~fB@j82J0FQU!3;!rD0lz!=p6b~(~+J`B&rk{$c>s8 zH<@;Lm4_tq469Gn{m{oR!*72Di1HcoSl0|RchLl<5}J?x<^wN0uuy^{soA#i5mUE6Xk zVa3tfHwXVt?~Cz9=#WJg#-a%GKf(kQwI=@eRin*s+d)%h?aRiCc5$j3OD0R&TcD18 zCXR}!VYxX94g()7jiTSpmVZxr7KK`OcVu@9rm6<5YdOB(d-F}&?u3r@0n?M^wfFr5 z4k^*@_U(+-cY*d3PSU}-8n+1#ThM+=el)e6i;gIGXP^35Ue;tL6f7re^P6#5Pp50F zKjq-tj69rhjK^P4pwS 
zS70`nI4m&7xMaQ3(a91`nBx~ED5a4Jnl6D+*rbo%d|!s2E4a7_UI`cExHus-l4W&`mY+F%3dx z7L7!Mx-N+CevA&YNF3Lw0E#(FCFbKFGnileEl`O3$l!k2ZO`mPC5pPmZVNE<2l>X* z$JH3&u7M{V{}d#@t=bGi6{E#D>%JeJ6+L0(Hs1YUt(=tqqTaRO*;`^kO(i>GY*O@* z(orW@9H8mp*?PeK&i`P?)e!&$!Uo_YYmk(wH0>Fxv|p1mCU_4*sIhJ#^iVG zjh=}47|#$GT@w1B~Je11$sX+)t9hsQokj9UOj$aF^ zomEy~ig>RPTr5R(wELk0yBl{@X9T@`9+_VKoV|IeY2M_m>8p;LrIvdWCDL~#cG-Hi zB)Cpz;fHwr*H*8nXnQ=IFvMD&5mckh+}yl<-WI0J zqupVEyAHnfPqs=X5Clo?j^S^zb^_@(63c3Yr-Q#syphXTG2DW-sGo zMQNF8x84+W_6^KY>#AM{_go`wX)bM|4U3aXKA6H5(T-NZQ{ts>HE!%F-2Ny({qJ<; zwOW8505DH8`OA*iwW1_5KbRVs(ByRrvm-JpBd5&se9S^@D54*92dcVWYUY!$B({`) ziD`Vi0T>8rQg|ePuRXzT@n4y@+jxTc0JY%8jm+x!qL@zt*QyPV+!uL|w@H_AZ3H;{ z4*JGq&9_{L0XZ1mCf-(kk%FT%No!rNf}7RSlT7k$l@UHSX(J$7!#*%1G0ck-eR$- zjrW~xzUpx;R|P^fBt?CC`fZVrr?i#`QqN!ItW)5t#Pc;(tY5EDsUe$3f!C#{JuU|J zifUWB?uVqIX+@1YGSp|HC;)Z^&{!S9MV_TG4}KgnUNz{4%?`zC@Dtq4qXw->%5{EaMu~6IPD{8BHW=aAE!H@$t!jWxlBLeu7|aHLMPVYk2jollM&0mzF7Z z)87Hb+Z>gL!IzAUPoBKVJNK_ll$s1+rcRpeh(CRYU_bu(X7HEBbQp~7(nRwB^;Ra| z_*m5S3^nF4wbY1$6T=3Um+z^&3H#9$6R_nF!glQx{po%zgG*g7UI!j0n(6rZ;N+cu zWr~vT{Q}BXkSG1dLTTBjEb~@SwVjo?-;yZH&fXmV#ycEjB5+zK0S%YmSgnp9Qa;u$ zekvoCnsT>t9Ak5HW~(*A#7uw%!Sy)U*iQ0)iv#f(Z-4#%$QfUeeJBz%dYFc1_c)mwp_t@8ir;Cspc z3_v|tw69$yiccQ$D-JgZIUq&0tD*~-S{hluGshb}b&d`SJGIZ`%m0n%ab@{HxRGQn zuF{&yz;c;SEZte=>JKfzJnn1Th{zE_Z*r^xovPdkB$j4xsT=U1V9M%x`5#4+tjvGf zJ_CTqI4@T~4NtRPl;{L!2D6&y#~S@p-YPjD3ZB3gN8&E7OyvoX)74bF)L9+v?X%YD zy5_x7rrNUMBUF|#i&SR3V3hiPgcoxw{!#e4i3Zk&#le^y{=qy%KKZq0E{3!)`_)tg zT8!q7e4p}_o=Tk~pY8UV-yvdRK#(N9xzn$d^SuV$4U5pg@xgKMxhZj8Q&8ovqE?p- zpXC4tnBqKfaAdiu$#ui9Oy_CCYQo{y(f3!w{rRVJ;$C7+2!0{Md=PEi~(MI7dKf@0y-EGB^kB$}z={(2 z&4u7$_pAJVwn^L$It0<3f-;E1z!MFsDOiBGNiwWH^x7v#5ZFR9->O(hYJsbKjGrpX z=Ue%ny^AJll33nCjL8fw&l3ID>$}Qp4Zv!Ee!mE$)-)b#eb}8HmGuy&yus9@scxv5 zg?AC(^euWxUbyxbH!|-LRE;ve~=zj_)EOLGEC(Exdmi!QghTu!C^X0y4d54t+>0jzI;GrM1J9gu#a?e~YgG@J#fIxeD_Lj`kIj5Sg!-a$?j zyqk|eI4^FhUR2!w6r+6vaJ%Up&JCr(9Z5#pr1s5~Q|VPWO#xQ|#-ma@p(L1i?o0 
zZ*@H}m%#>NiLp>4nWfx^E<-7@-nrkG-I4YTeiv8RATXiY+380I0k;uXAyuzV1JL)lBR1rr>! z%7PQ|(}858#{tr$P>ahNvlJL@`!T<>+2Ex`RHQ%x+l-^V7T?@7mY`AqKQq9OydZjDHI87l)M<nE4 z8nf6|>`w6?t8!WI+2gh!kfx(C8}psyqA1kR(&+8xRm+6mVr6I_dCrH)W%`3qTZ?`XN&B+=oFYYmEZu!9; zApt&DSyxfNXrG<;Kafo@6+zpcc%-UN+OoQh^j2;+f#%T+k~wrLIveta;AJ*`cJ0jW4tRLeKeYW%`n-i zFXch$rj|J-X%QJlEdmpHZi3sG4K=qcV~65AC2N zD9$P?UKUJBQculfZ%wJd-3wRQ#q>kzcH1t?Xwp@#8!TDnVx{8PQiRg| z9Q=An)x*oI_rlKt@GWaMaNFjiEz9_IHktopF<|Cj8CkTSU$*31je+gq^UO=X`|eZ| zZbxzH<;1e3flt@?r?Q?aotSy@FwR#-9%EU~Qr)*LXD+f}8TYGq%qjKmsyr8|xHxJ< zJ=an7tgX1JFS*s4V?LUv-p``&WOv;$4moC_6nLX^RvWOT&@I>M(D|n-YcJ9=G9St7 zDXLq<;SoPES==-s5%G<;J1;I!qc71!Rvh0#+d_1a**_!Jl;1yZc`IkeqNQ;mmOC5K2fbIr!~D(+1sk1#JCrVMd%)q@v7-2X?>Do2tdGf1c);Be z%yeCh9=IQ1Y8mV}MZMt7x(H>8F?7l+vhvO{?fq@j>T5HGcX}=~cq>tCKvd%{5i5oD zcC`Pg<-6GynY0KB|Az$FgJ6HZ9=!h2)#{01-GLpH4@{t9|JvT+^Zm%xgX+#T@qgn= zs|XDvBO_fWGja1u*#=z?1xYPbbW?o?;|Ts@CNQ^QJ-#^Z!tr_7(fBA4s?{aR_$9*I zi@&^JtvpTvH~ErKQzXiKK;g?7)=%_{JAcTpqG^{Hu~IzKQ|;{Q+*%fq!z!A+yVzY| z=@{F%jMQ5c!bi5Lan3hTeD%mlA=DiWY|}AG&o6+^NUztPoMF~nrZvTgLk&CP_Wo~= zk|&ZR8g7s76lotbKlD4khz!lNIp9Qm!>Ew&7<_hY8g`9dNv(t=oa|tnVY08J)|V|U z5B@9j+M3pf+lBD2UeQ!#&a00pafV`5HA4JXOJS`KqoWwRa~P#3j%SS1MY~DCic)a1 z<|TTjIiShS8Fg-{TWDUASzeLb9m~&^i$iSsf*FT=dt20GW)RqT=1IXXQTUN>_!k4C%SKAGd3+$}1_*C&5Vhx==nQYBO^GUMx(!|Td|d=&G zJZ(z!V)vCcX!y(N$ygLRvk|=?y$XjX-HbjaSRpTnX2?US+BJG@nCPRHe$%ygbYKQ`l{!};E z>=?g@tsj6B$KN{aQ*~)=aCk|}Nl$SI9la4F+#7N0 zVaL5UcRmj179dsaA;;QP)#7^+4NZ*$!tM@sT*xRhcH|l3U$^OT!nVvlrf5qOL320b zKt)n(cp$n3djA31AD*}yf@Zv@N{}!C@%i@u{SMU(Ukd{odce32)Y}sU)Osg+qa+DE z>)@5cz#UFM4NxC?q4SjW>pbKje(yq-Qh(l^z3Xi8cS!)N__=w@OH;If)Y?ess8nhI z0+U3E&c}s-yBX+QFB12}q&}39*+~OJh|uXc?JYz7jW5Pq=;oSvbfNpf#9!TCy1@p{ zL{e7Ombm*h{-QaHix&JV(;quzAn6d@UW*i?3$DEW?V+geU`0jaXKasJ;>z)!>m z$v!CP#1^!@UW4l`7KLFwfwp84=lVn?AC2$f{6p5`Lhvuiv?3|`tp{3gl_RkYzwrV> z0vyH%Z6LCM143IlQ;jrbbW;TQ4~>GEHQR+n6loEOQi>QNsKh-m2U|0AfZi`^caW7V z9XRZgvILp4nw;v!tVuGWKZE z6b)??5Z^Boj{b!GB|QeGX|Pr|h}mMrb9I#Vo}OA$Btc2dgsCcxz##0iF;0mbOa~x1 z?yaiDgKquGhR 
zw3PHCqAYkC&ZhwbEG9rg#f@4u+7=7T5huCMw%fnScT|v)J_T8PifL@S!GKSPv*}bZ zV$3|bL5(*^VRf&Ov3$+re5R7QxO-uP%Fh@_1miiD5}JmE4+fZaxd2S6OI(BN!%-FKn- zG#;oyhgsAA_le>Rnmw8l62g)~Gf^Kagy*rH_ajxF_1{@bLQJCZK<|InGY)Zy3*wOZ zly##)%QIqwr)x|7mP8yb2 z0no}kt3-;cdhHjbIzM&8Q)h*#E~O|6xIyfFJPiezA_qFnYmEbS8eH1l^iq;z9wT|d z0HjpC6~Td-FiMDftSD>?i)SoBw-378sbq3Lqdk$7H$2(Ig1k^1bQD2AFx$ zvpf{M%C@Fo_D2=vgIFX%+s)T*T72ydjkdatnpoUu$fIZTJxFSxQL#QFWsH^Ra?w!1 zX<{DkAnoZuPZGDaMk?U=tSr^ay(WN=ctZ_FV(6pgru>hgPv81e|6iG&5hOIjL=5nd z)NeOSIZ{+TV1O411w|!~)3SX2gUOV5gwa(<4*Hi>bQYCL$A-pZBPgwB$m1%4P5o6H0$zyL@ap;`ajH_0WNyL(i<9{_+|LJ@J^ zQhKmjf<3iLe$;^8_KD_NZeOhD=`L)V&0>XbtBv-rr=fl)Ub_q7N!Oa$&2tfy09{aH!iBcG z6}o+1h)XUDQ<$MDJueMnBocLih`D9!eWe(oo zAfGYN^Kjgy)F)9oNR)B}#Q#FxJ`*{s5PnOr99EN3dEk7h1!b|XYK4(GoVSb|q`B$n zRpf;fTI)eGHv#zz9P2Qq)6}JINLetDYDj_uEC%(yzNdu3);Z+hnBsuLnI0Afz}`i+ z+xm|>i!#$Mb*#p>NJ+#YX>w88>!M-_1)0gI*j1Q!YJdysl9JNyuxhR@uM3+O2V0ev zDzE}5kGIK_qWgWQ@BfyJaVV<55IcwCW_9>7Wg#wdL!G5bg}mF$jWnCME!3~t2VFnH zNAr(t;q@@RXKxpE?ZdY$6X~?M8t*l{@SP+%9Igh3Z*pg!9yQfU(dy`|d2iGdC=hhQ zPO`4k&YK;`O`vA4b4SgXnJMOq!E{;F@iwEU+0j~U=^q1o1ftiN}A|z9S zQMf5z(f+?DdluS$&a43;KWXy~B*0(VXW03Ad^5))QBAue8^&FZ*rR`~-z+xm-n|I& zai>M)OKJSR^G90D!`wI*9uy?~s9dsc(mvZ>TynO9f%~3&-ila)Zamc^r3M%-{l&Xz ziSs!U4NDDz%VMk-WB9;>(#?ykKEfz1JLB<*2jw~jc{P2*;fvHYhIbdecj0AKq@nQ7 z?krv$+)9AjO|HVqpzx7~owHi)+Jv9$Jsu8EUB89y-@ou1PuQ^YRXRPLrb-*E>`|Ml zh)z1`Tl2d&wMb$tV2i^TO*U&d?PAOa`-90XOyHXVhJIgB_vY)xJR;K6AAM|oOjkRR zpKGTcKKPO6oMTp)H~qpWoN9Z4I&5{J|I{oWIs(kjTn2gwWnur6ui*`_UM67PkThn+ z;FZUVAMzMeU&59sP2?EV!uQ?~gQLaRGW`HXx!}tcE6zM=mQ)I|-=Ca&vaouw5&c7K z2-r)AfWu{bHY&MAJ#ajjRIshz+zH8A-&m(CC@x)Npf>>}?LH}S{89GaPf4~Z)7&NI z37U*tuKY`=T`r@^8u%ZYqe0R`_%Vvj(Cqi^li9G83le?*qg#ChVe;`qaYEK;xNItV z)v6TryCiodUQLz0b}GO%G20rn-tgPrE)%k74jeX#fKSwR9>U8@K;J_Q3LF~X`*`Us zj8KweV|nV4)Hb9yM@=E7Ol6?bcB=7`^kr``46?fOtWXIaBrWr0dm(IV1dcpsfO(24 zt1qGKf8LarQ%PG-XXw5vn2vUGXh5DHAO_6>mML7^&8T{{HAC9w_k$dGI#A$gEKSXV zQb`Y9i$)HDJa4AL|8REmUfKj-S3Qu+#|FO@u}eAADo=WrVmk?{|13r_CNJwj{rh9p z^PlYKG*yo=;%9uADK=~QE3M~%?WL}> 
z*-ppnVQ=aMKy`3YXbVbdEjjloz3r0V?O%ik65kw~E49BiO*&&pI~f_iPdesf9Cb0L zH*LH`eK9_-+e z#9M7%^btgKfTd)_&dGnoDg9@C6gsNZKT4&K`GoJgQ)%ZT@HW3$CXP0Q-(LllpNMw# zMiz6L91AY~nTWL594jRd$~Du}W1V_NGB79BcWSVpG~Rr5`1V2Xc10dLLt$E$eC@l2 zPfb;e7Vgo`E@R4#PFxh;$JpHSSW-0h{OlK_Rr>7?;AVL0?P>a)@!jv7uJ(Z~kE*B< zVP&0puuhl0%ZQ8Wt{dNNj)G(uQoy5Ka;tT=Jk`!^p{lyEH0Y*K)$Xgoc*{G1rkdG0 zm;|z$^m9EynC?f00-}&4R(2Ct8=ud^Z&|_d#S6)`t1Xuakyymhf}3< zHRrFT`4Z4~LgT`K6I?$OZ|FRbc)HY7spf?{ovX`>P9j)0HoA?AmNy-tivz`ui5iEC zZ@Hd9*PG~;o}Q=)sG@>&ZSE0hhzy*0?Dc-Z$b?%T~c2mX71{FmQdw(q`8b$U+e@>H( zzBgCEBn^^o2ep9=5!1@$?+^STH;}NE7AM!p`CuJ-&s_C0Cw+~n78kx~Y3^GA2xR}t zD4AbbG*)4^WGey2B=e zkL`7fS0xBa%3NoQ%L0i<`V!E~^0yq@I;Iv;d)W9@JM?&}Fm$zX1;3igs7V&oheb*9 z10wET$jltK_}>qe08aFu^-R?S^3J^k=?J`v`$*t%3`l0NBc3i9v0yMenobNrD_``# zN4z+<>-fEJ3c+qza-ePW&|YPVdMz^zYfmH0I-61+$Ez{8deTE72NqOjpRksX@)upB z>maqeBG=CAKa7jbmeeOTy>SUqoPYB&MQq1YP9QZq$E)#RTo3x&L3@cJPS*2(BD4XVT3Au&>ejBJjRa@U+<8~}=F$w}HO$;M zH)0_GJG6YQG;h7|Scut={-Zl`QK}If4%stdk%zZ(KYjUm568!Sf9eOpk;FD+Fl?^? zUg`%hY#MZrNSq`&i!XrXd}?t?B^J`FDn*f*_>W z$JyNRh>LN@Is2Q~y1aBt9#}o0K&*>2IDV;SyfIVwtmb(ftV<6Ty?K{u1bIUKeN<280BX z7ECYVq9^5yH9vR8a9fDn+=NR>DeCG&Ocmp6o|sEIE@hfJ?X#<+#SDECfV)%~aBOWT zA}bcZITe}T6Mx%Px-o;&n-#^V!>}!L&h8f1I$i9J9ZBqVZDk-_?i=oXigv?jj?i2>V#NyC_gk=f}+^D^p>nNLOT)cnTI5_uf2_xw|VBFTr zJeb^A8kV}^0rOvk<{V%<7`Rs`S;Wq_w+W80CWku3h8n;XW~d= zB%FWv?SODsN97eV@pA}onX<*{b-bAnH>c`-JU!QUox_&+Nfmjg-!dqmvc^w8PFdOc z)*J+8HMd0Rn0I8Nrn&t;D_PEQNJKrZVO@ijB4!kr+wKF>olfU|c6=`_H7g}l*2U5( zWp!E~R$oRZ_b4W|Q-wzs!tPe=EjD`YXzTWMLFiWuzS$g)-i)6p3pf1WvhF^91EaYgCF_awXDQ=N4UMics@fmI535PelA+6VKOMtJjG z8!e?@QA@FFF~4Ruj%?DmZ^rBAv_MYN3SR&AlkP4dS$tPE`8HGu_{&LmDMW&iWY;>1 zk@6CExAl9k4d4T;aK4aJAB}N`oGJ)%cWvulCfr|+Gwg1^%Bj_Xg~0akYSmB8e|LHg zw3obxmdqu z2VNQp?ILaDL;*6@o{f|k8sx-gK&z{J&!34N7IC4+WtbQkO#rNYDO=cW`X7=w+3GuD(2Pst(l;bMKfY=)P;R z@qV@es8b@1%M#Ov5VcmH9)*1xE-`sAZbzQ~r{?KIG^U5Raf`un$7T;4wTB(KdZ5CH zAG-lChtKGgRx5faTGCf)sG`7fE5e7N_$(V>e0?(37xJVbr&GmG!B*!*%@6ds1KvvO zT&bYwWZ;HXl%8AViS 
z^v^ZRlej5ZAMvfMbapCZKX}~)5P_EB1%cP>6d2Gi3_q_Bk7;4R-uvAAdRqxS2cjKv zzH{xQwfo+fwuqDJ`9;voG7D{l&PeDz156AXlRzg@N9sl|AmvsV*CYJyeOp5hO}gQx zM^#n%KU5JTjUXTfu&v`!YGdTQ)t?I{YNlu`qN`JzQ!zH8{`5A>@+ikYyLFcEys6F~ z9}tM_yxTq+c)`yu+P!c1Ydvtwj0;XutFj{l@|R*xRegE<>d)r>|EcwZP$YOyniFz1 zsbc$oK;Ed2TI>u{ORY}uOHjg9aC;DGhaU~IekJ{GOV@bB>5m~B?M~9caL=#6|7xRS z#yI3HV@8A6*2=pa^9}W_u~B4{LbBrmY4zbTn{}^y*Sg;&pL5s+zsC%`;kU3AQC;sA zG_agecR*NjAEPI@O0>%YJX1Wf>l*U5r_LljVCM#5^%rJLU^25#w65VTXdjLoi21pZ z`_eEM=*iEG*Mk0-8iW5iaS{?`(#$2ZhH$2;*;{E9^M?@;kXp+dBjX!iLmY_(6KkWJ zOPSl2ptNGa(pk@`hlN!EZX8m2)D-vU+*}Rj!Zs69UrVl)y36GS^a?oGN;>SEH(1sa zP)ce5sPjs2+JJoeb_Ea$Ouow*tle%jw5$7GdmeI%OaXA3SQr}^;adCondMTQP2;*k z=x>)NCCARbH3x?TJrAxrU8gKyG0a8OF$p1lZ+^7r$~aQd=4i$mgMMGO&~lC{!e}#qqfT^^CeGSmwz1Qe+4UuiU)7sjzX;*uBV|jswqG`VrQJJ!rM;T)< z$_X&=(|BL5dyUHS(pa_2liR*(&|BK-b2~;(*iP-$Qg4i7%Hmt=lt=z*3H!MN+m41) zR?E!VoQI71?D(Jl>D8gB%EdB?vYI#(L1%#I>h4^&Q+q%lL}l46N-GGhQSU7|+zOp! z@Xkw*9MEQB#0&gi7bVY;zM`F) z>aF0&h}eboMasJlFSS5;`LhM|*VA;{@hTw+tz&(3+7l8OEJjnP6DGw)avF#wsO0L; zZJy-JOP8dizJ_N@*Eg3kRZH1GZqv~D7gF$GU zt)g>~^99bZjrCABc4-|d^(QAB>|9A&X{OMI5M1&HNTY>z`QS4ACc=n_-rQU=MQh?c z8?_S#KcrkluC0?j+xZhf!0Os^kRJWI{H;a!4P6iY!*Av8ogA%ko!{nQm;1eQ2NrbkPd*Fzg4|snyzRm{XKL-HtdW?v>8fdycHv zz&)jw%y2j zvqLA%i};(MHCSU${evG{z8`9@r;E@a!5AO(boy0FUvH zn#50imX$#nLnXmYqeia z{kI{SXUCT2IysJ|VW7ICOaKGSxz_XvHNeL`1;OSh8vS35tl13}XHw^WVOg8fcU-rd z`N(kqMF8y0>mr09y{K+YtQNT z_t5mKm-Hik`IZ4@(_ryBF}mhk;=!}V$(&#`W$RazfM{A>A=9j5`Bpy>LHrY_MuM4=9@{`^qOB` z>MB$y@}y@-rmFd!bTR`;lC!i67+KHx69k)P@s`PUP+~z0E-7NZC(Gfk2R&{mZ9~jk z=cdsbkfok0Ssb{5*Gyg8pV0>x>S0(oT2=;QYCQ%$n^B!2EM| zHAV78qKM9<%%v2$e!<`V#-3lv;103d%O`&LnDF&2U^QEd|C6D?2$NmM+Y{7G*c3>U z^lkS<$5D#N4>072^$j`waqAboj0bh{45&D1lDURwqcM?7l;bJCRh{5K;jJ?d(HkYY zPCz?kszgt+3;pxvme4fM#4>{x=Q#opbSz=N!~MNM@1<~pnl!QQ-lNl+$3d%{33a@* zB87FqF!hO@X%u&v^11Bu#W1prFI_AkuE-s1nfsnbWM5I0I^t!Yqx7RaW`P+x z8+!6rt=v1IrSn;G4$pfGTo!AFn~I+ST!BLs`;VkVXEu}yfFwu-=@L)-;J~A9z72J> z6LU;q3BKK=CEr_48xE0r;3)R{8OZL*$+msy(&q}Hvf>Sj346;aiSF7R)E@HJS~K}4 
z6DQ`8@2=QJj&~M{3VJiot99OO?nL8%Ms)QfJcAgv_DdPfB=aYb1bj~o`xTtwq``~S zY={04jZIYbphE?iP#gBik8?u07{_y&bC1i!Gc|OP#)p@J!Mnh>ILAtA~bfPJ5 z9ePLF-rAj69-&`_h+yOlzyns&hy8))|7roxf)o~`^=1&OU5&tPsj}97`Ql?|t3V1a zexWh9uAlzCtY{-7(?pS6hbs615C47WxJxIIBE4)$I$P-7$EM0q>BCFm{@yeZ`Xgzjy}4J z>%M|p-T!L_P|oG77UJTV_!|;m?Y7QDWm3$7U@3D;i@!~o{Myi@6u;|JW7R3e2UU%w z_v%h&JJ!s4Q^h(l5f_Pfh4_p1ARSE78d-_V?AKnlJsaqZb(e!uG%@dm#B{0JSe9=5 z)*fSHRXI6Jsz$F@6K?Qy``_w#;Cj=Vfeix%BZm>-hl*Oz^@7C}uEbev-R^5wcK)qO z_@Jn+vZ}OkI!Ct&U6%WLiv(MP0xNSdO&3UQk%DINv~mEE`cIeQTXG1WGPmW_YG0%l zL0=bBD1Qy7p`-T0D-yr%`5{7i;#sl@TsM^Z5QFM_-imH4K{kT;E86PLVSjQ&C{4~* z*yiB7UaN%Ra$s$gTrT_ajZG_9BK#|(SIUt@t(gW;^HbXD!p7@pJPE|NWONgw{e3Qb zA?Bu>sZ>1=oaBBlZoHaQ^Bw2$IzhY%4r}g@1`C_Hz$?#h{$(;Nz z9hgG{2g#g6P-)m7-m#V14Vu58{?CI^s&j?5foBQG>h#RPT9!@4anXO32yS-(`?H0SW2yCe?V)(cm2n47OfPyOKEn_?#q?5WtlZzTu{=8V;o|T$fr-oJ8fXZAhknH=4N4gQ&Q|R8) zh6+*0$p||dzjEnoq1L&M*>b@4(A4UcAAtTw61v-eqOtpy^H|t-gxI# z$NGRv=|*Msgj`IUIr4mL>`wPl(Zk}$9j(V2%<)^e!pp8ofgJ79i=?pBHWV6o>p*nL z$#}25tA#Jlg0_;_o6PE;56xmF=$o_6-jH8-npiu36`gvYhi!rH-~gtTvY~Hk;0Y{^ zL$*O`=bHb9+(Gsa0$H1Q< zSY8G?J*A3i^1w{;P?`kH*=;HBp&&(~B_73z=0iLs7a(lgfzsGU#*d#O>Cn2|L;%Y% z=>G*pf1^rn0{?XL_Q&mnRq2(oYhox?K=d#8Yly(zFyy1IXzo80hS8MGdLxrx0krL0On=z?-}5DT(^NKl5>L+fvzxtn+O*3cX<%^K=N ztf|=|&$_}D^cqU{FCnbACsOgi0{#OHZ^O`LI@3sFaUW-FCxw6$&lq48yGMLM1|*js z_9YI5l2XBYJ8|$r5KS4o4btjA*xy(^!L$XauVIu+I$F z6p~-d+1f^-M=AilFGtl|gqPNGanS&NY}nA|T!m-n5p+CC!~Rk;GjsS)r_qr$x@Ze` zRa#6l5iVM;iP-_c(kc5weG-1*zoD?!3hn89r4g(-gwHa(PLYM!<*=)2YPGVKfvYX} z^6McmSFMqxDTd?%s*u35mQr&In#r=v1wQ(x2fPgng>yu{{oU@w$w|6TEozN<&X z?ZBM{Q3ZW>x+1no{C>0@vav3ka0-|m#A~k#Gz3ZKB=q40asZ>HjuG=b=B-YKHt&-%H z+^^dzMMW0M-ByVumyyfdS8hvUGR%D#W@eaeY_skCdwqW2zwHlu?e#p*bI#+I2|Qs1 z98N|Qsn?1p<_~~m8k@?mJE)#1re`vBi{2CEA)pz=9>4Z21H(hX8fNn!~a+P93}{_}rX z?M2vtIvH9TC3T%gb?2S{y0dZgnW=Jn?fa*?AHGjT3u%GI zuCctXH3S8CKWygvEr7*p!11?w|7V2i@S%ak1n8T^!2)K|hvzu9)jA$k-{#-%nh&N(R3JAqcCFWyohdtOV~g(l~W1AI~hoxopm zZntRi&%M)R#FHc)2L>rwMA!dxnrtb0J8_b-t8VaugG3bC%m|jTt)Q?gwYSD 
z$a~npI4oa3*+wAoD@BD)-tm0?J6IAgd+ByNp(P59SU%TRHM`uR9UGX;{wWrFL9g*a zvGg4R^jm~7L_Owk48T&-I{v9)B$b)%=~suh#zM`$<+hbLPgVkZ>j zr!>yAqj#KWHhKxOf77M!*gWH86Dsc3bwX-%>EY&`{0sw~{%kpb{z(u3A+{t!J+rY| z_6PmYJ^BayA7KChm&A>4qZacKTO;%?QXBNYJSuZ#EJUG_^Eo#*P&Pdc>@Zy=lqwKB zL+>3;wn+hlJHT97kQt5f7;`-DzNn%uONIM`Xu>EpPsRi;Aua)ePGv!njbeF*#o}2l zgQtq9CTWyR-B+@Yp`hB#97=<}^1=YvW@4DOQr1J9i+Dhr9Z~G?Khnc3D%*x_qa?=V z29jcZNClJM*(&LPxm3BHH2>te#nIesf4<`i*o)zn9{g$k+SWBw2o{^6A`4v! zZTxs)w|`LcQPNzXT6l3OMMuN6>Awod4)Dp*Ui-w>;S@wtuYddyeS6n@h6{4u6PKnBG_ z`?j-m4sXM(UIBV23ad9e;DNy_VM32UqA!EK^tDocuTK}Facp6P(}GuJ!{Ie-yMZc4 z?_U}|08R%#oA}3IKGJk)9$7=N!wSyfd`}aw(A|T906UT6sJSBq>&O-{#Or8V?MF>r zvS=O#jumjflaY?>8-Pp6C_$n>G0=9+(5D4ZL7#4Hxb-z1)bQ*ozuk;S-$Yd3V%GQj z%+|FojY!#?q$^1Lwy!!>WC^h|0BmSG2=U~5Gp|ZGpBgde-X6$!9ldX@UyJI$`(v}Y zD>V`%ruIZdbE%}2sd~=AcH^*eQ6oi59Y|b~q-b~OFw>kh_uS2l@DMTx07FACTmRN< zi0Tv%t4TN;t;SeUTU~&Bn4OJ?0Q|kV2vWY1AP)BkFY*(w)~5N#wwOUEsEB1B~Fkczy4LPWYC} z8C=Q8JQ7j!lMlGM!c>?WLgs-Qm;PFECK>KJ>eA{o`p*8*{F;x!O3fVWoz&4Il3LX zYrWzTf{pZre#AmtwYHL(Es$f27`W5e|Eli0?U(+Xtm^f-$_o4^iFVff0ElCr!MXoS z+PX1v!Ibr5Vy8k&wQp}FXVcZcr)A^<@=mw^Hhfvu$M6zE7%8XZ4&=V2WZu1AP4@!L zZ3yN65VV_l5Fh&}4RAE3hhZn-8kkQ|Q~6ap;*p6$gLz!?vM|rH(rZUg7BC zap7B~tjBEEqCsI|*u9*GDPP(5$D%+>HalJpxd1VV2OXsD{R2{jtC!x;QOYFHt_q|` zsn6JK;c6k&Efk>cgOPOuidnk?sjO$hXqn2l6svfiDenR+l|@sxUb6^aLKq8Nf^e0E zXrI#5>TE4mleEbzL0}eC#3>*}`-2c*i??noa@D}($S)mf#%e7iiw`t56-MqfJ{PaN z1B$o6v@}NYMlXA-He~PE31Z6{G@4%UDAm5RI@j*>ha&A(49@8in*Khvx|tM&{2|pB z5D*(%Dl5cJSx$)#+W%g|^y3Ded|OS;u0u+JjFh=36^L?P+rB^`KpwD&`*h(RZlJ81 zG=TbEN_24suNSO>NwY3kLdzeBAwBig{6qfdF{2mS{eN=UDf(DBHOJ4$bomS#<}sZy8vLfk)Y(GIMm}x5 z?<-sgly7~ovLli3__fNzz7>;eX?st}6x5G$ZL7JqM{~2kR}oX-y!wYe!nja8 zh>#x=hLFJc#RtKGb2{PMiQc_XFPxDyf?iK33)8I2YcwN20ZfCBeu>9dc>BQ9%b=_;-By}hCV^xZ3-+z# z{`n3x#irkO1dKz6WId4hWEvEPKz*1#ppp93=MC*_Yx(6$ukN|CU|69*lcprrxbUpG zMK7ZG`n~nU+jioRmipnf`XLu!_(Uo=i zgUfouu@mrTT=FF7Rl~urmOJb6oaXmN+(KO*s|E&}L`jr8YYbdY1S2C%S<=E$ch<YCVvh56Q<^wEtIDrgSRA|e4ORX{7eEgU 
zss;$;S;qvpZ}~tJwF7#|>LcssC_?DfGJdEEF*ctCe%t&&<3i`ZT1nr2sD<#suua?@ zlZ~Xr%GEx@zx&`?RdO9UiPDc6@@Z$TZdwW_Z-`#^`jNeZusdeka$EByWA9slD_GH9 ztYMqRQ2PcI@;GbmGzDLhNNqbJy8a}1RC*?YliXXvj7ZFW6@1OawWri=xr(QVi)&ob{s$?G^wx3u}O-z4HhniBFP;Zw>tsKSH3zaMeQmb_XO4iL# zt(}XY1fX(XExDsx(XrTr+#LUy^gS%>kpvl7U=#H3AT;@x1;90<5&l`mG^6*j;CfX9 zF|e;g=+SCg8V z_i2ds4jt+TtZIF2D597FxF1d}luXUv5+?=6Hxr8S1;>8VW^4<}Gi(jTU8DA5Jo;-$ zr!@g@30e$7xveh;CPDI`xshm&?fUVd7+8;maUsGN=M`;O|H6GFN1Cw$Nb}c|a4vkg zTI|sgU#*ltN}&aB^qDw}--fhAa24?RK4xCHHJTKZqgb^&w*K8#6?W=#>xtPWxfA3- z1sK*dbKEN9w0z8#Te(xe_o5s-+%-V`egR#@S44aFL^@h$yr(-~RU z;`DaE77r*(3rQvSpA=4&4Y*1j_Yr*E`GR%Y4PINj2k*S9L{Y?-Ie+82n=0;RYRZ{v zG>w6`n5Y}{MSXGEj8!i)dzM0VOtLG!VESu~O{ZbO@{x@*J){QSThhBP!Ou4?+L>=k z8fBWylCsX!e2c)}GGu98Ngxi~t`{}`cDEbd;$@?Hpe)bY;nM|Iw)GHC7w6#1KG)mc zp7s0$jgUSfMLb3)ZGR@NdT>_mF3e7pF~GETI?vv7(?j8hmjd z#5o=Jk;8c=!ZN8_(zH*s%Z#+bDx(z0SQf?j8j{KjXzYM43?43co9@PCUVw}%9IL&Cq_)C-Z6d$ckcu&`-6f7 z9J*;qr+KDc9&gXMiUUBOLdi(dC*V{0)9KF=y)@Nw?k1stCvAaq5wfPrckK2t(oiiC z)cbqmZPhuxIIJ{S^|aNZPq7lbj{aC`$eDZM=o0p+0CN71^Dlt~uzXPnmaxcbOIle+ zr=uAH6OK%s-7`h3|8OQlOZ^9*LGKa#;!_NXyD!pfYu~8e194XjL>_D$wK2T7^O0Rv zfO{^VSLecKg+gd!_*c?TI``j(z47(+Z*JWCb3QPUPZ6;&u>8n}Mdm~E zZ6ixwo7ZIUO!YbxTqaYdmRcwKPrq7|vcz5~R_uAO@9&rklOQv(-`G=wUs%OX3&j93 z?%L{izp#I4A-N8uA;F4hZH#1x?|GnF;8N6&H^ct4FtD{ogij9>32Ed8q2ld|e#5~; zbnR}BO`4m4%K7u?bnp-LM!UXYOWC&=D0M`{q~d3A=`qG(Ay#n*y<;+hGgp~C$gR`S zYipFh4&hA)Dyx&k>-I@E$4{%Fv>-WHoLjKlt{+y4x0T_~-{<;W6BBhc<>OoP=~DdO zIGe@RrFoQdTc$^g$2W?+;9H-*PQ$r_@^KNF=SIT*`ibGzF622xGa+G;PgxiXx_W5W z4|GMTR)BCFYG=m4A=;b#g-cw`i`;X#&`&W8-^cf01I}bZO@m!9k4p=TQ zRV+r5%H50+N{*`T)3CiSn}@1S+noYjo%};vJNyV0W8t#k4im(Sh=2sHXz>Zx`Ldtl zx??{Oy=14k_oWYoy<}im(!ImE2;QYbQn$GCpqF1%6rY;5L3d*Z^N_}9hy*s- zEre_g_%OVd<`q0!L(I>(j=PA*2Z?tWn>r`$_`P(J%ImY|=A z=IKU$zD*&hTU`fzh2oX!l?F9#pw3%_0f~;1dbCUX9o;9VW*YYFZ#RJei!Q>2-UhS3 z_|BzAX@v;)a0|anCx2_>W>`|Ri#m`)6ebGXD_z3F@CaV}i5tf^qhDzPO{Q4Q;i8kx z9uZ+*LzoJMszK@;C~gZpVfw^^MB^?=4F(#l?|mekGM`&wp<56_t*+lMw{}qE{lk5u 
zfd!;`qkQ1;yoaTlLFHXEs>Y~BY&q1f-(h^tN(;n>iGU2Vu#!eySoIaZ=@(u}rp#r@ zy2kX1c4i;HeoS||v)2L$)I5v43M{_-PSgb#Im){NkqtyUPogc#1jx>~FN^t+nxkvKVe*a!&LK2)z(ZIo@4lEl-$huN;{593% z-NL1?|2DWGDh5h9nk!Q{O*$4A4=n6!14VA-hD&ip3%))Ce?1<%0-;OR@K3ozrUIe3 zg$t*O@W{tm6(e<$ab}|vFSZ2=!FTtgg$ZN8` z%sm&6vQw;X_C+)LLKn>)yPH&w+tfQHzH6J-$c2#7;nTEU*{} zTg0>2xPS7U@3SUzW31M%9M%a4_XS5u1VfD$rK=zr|HW6*PoPI zX`0yBYPkt>FFLp=ukUnBD|tm0KN`j3xSw)KiUa|JP;qn^+dja|R`%b9|28NgcHcfj zctzj@R8YUs5yI1wl>#qUr8lc7{f~-EJil{z7PmZr*^$xeZWoP?XJD}(_z^Mw*4J_t*ZrI7BX!9Gcez#h zWjX4X5rEZ9}2bI}`c)YP(&xvVO7{@X;@hLtWTikG}6d{q5U`66p zmZQI+IFJI z2#ilw<=|SNB_a9|F3sAenk&_pu>d7cgE1%zC3+h^*3i?I-kcE2Hrx#-^ScagH?HfX zOARg78C+Rx87q;N?5d6_c{Tmkp|Sd~(c^4G0Wx=K4P@?k=1-ss7Ur|$x;Gj=B9)eu zCj6O3SDywjPT%MpKd`B~h(j?UyG4s^K&AEyU#_q1qK8js1Ynr%zrW@ZkiMThK>7!%X!rC1R*}>r3!SeXKSJDf)LW|yAk@{8AuXh9zavhM94&d#>KP&Fly}{- z^fC>!_n5#PK2T>Et{CZ--2up{#Yn8;$)1t}?9=7rg|-$$(jFt<*yrjOC+V z0KR9yCImAYAp^tCdFdR3jhKop3_w##(t;vdm_@F60bjm+XKL7)v_H|uvDhQ3-9E|v zw;t7`&uwFySw>mSHv*FUfcz& zA0CP;aqI2MyE27icH5ip2O4>Kn`!U;J|qgK2}TFF@d*OZUZ4G_2WQ$gUuxU>tm5f; z@rE<{n8#zj|D5?J(^ExLg|V4AQbQ)FulHI{x9txC2X;pQW%oufcBPah{{+yT;Nb$pt29A(QqD?`HJgT_MEMT}N z3VcgIWQ=&$cs!#NWn(jP;ZO$!EL1lW@PRv@Yv)v8b7nDc9!;^^0&euBCMhZuMMvCm z>3Gyor!!6}snckEViGY0sj&OvQsR4TEEKtSrFsk|&wXFAkA8f!89Dl+@H&iij) z-k1D4!VjA(4yFNDKG|o^0-b zXpNOr3e`>uh7CQ@Py8NbkPA)^CEz4`$6`($akQya!89Sk!03r>^d0BdZcTy)ih+FW zXxdYux6T7hX9_H}XE%Qu3!Fxb&#Jrvuai+jez$@07=YP8ze!qC*HFFrjwMqZFG-Ov zR42ecjaV+k?7#WpTE8c$+|{QvmoHfY+%E(+-e)oet86N(gE5?^)jbUWoPfCqp5a`G zQrr?i!_SZfYzNdJSZ)^DPJ#kPw{x^{l*cJ0O=ENC#<5CLcMEnSEh$2vF%JAE7J(dR z%Cwukx+Kvf9GJ>pp$v%cYZV9vi{d~Ukiuph#;}(?W;e&QcqC?rYZD49?)m6%@XLFD-$mzZd>DPFnIu898{h4;S z5OVE3ze$QF)bK>*O6AuKNadWrP`2fV+7-`t;VcpG{OH&3K<@%3LFG7r@`L03D8f1} zfdPKqsu~=__@S7uCrJ@XG z<}IgAfzq_FjFBW?YpsIoW-t?h~dzWBdtWwAG<@o4Wa@7pzK zlku;NToGp^?MqiHP|DKJdw1jn1@<;&5V?mEAl}e)K#&i`FtUILxmrb4Y=sRm7y)sI zAl_WL7KE~Q`%VS1Gt}+593db^Ry7%F>IpfzydQdSz4)Bm+nU?@fU7`Jsyx|9IDceu+#LlDf0x=Q|#W!9CkS-R6 
z?ONA{9}3-epMT##xQVhNff~S;7x9HxJmZdZ0-FOnD;RG9I`3oj>6GEO@s2M7Ud0@S zS6U#qr~Jc{o-b?%+Y%irMW)tCjQ_)22H-m05>069Xvf{39EOly$;WqhgD-s}1F5ro zC3k*p<@fuw3BEyrbnv=I5-EPtesYWHk#|TKK@~bl#WCGNea(`;edN;sL&VQNXF`@M zn!Bs?=22%(<1Gx>Era#vk z_)C&n5KgIMF8s(NNhu^Jlr6&2^8+l859w0~L=UzMkO--PAN__%Z%#dtwdXs9!o;%r zbOLG)ptpEf>t*FAqGv?hF7J+Y`V%v^!GWW8rBNGa^XuK4)x z=`3j!06%7|{C`@^DV(Gd`21LjcIM`rzyU^gu*D93=sKA$8pM+!eaR&nK6C*?rXu)c zLh<+|J2d)QV5@ZdM2hL2>v1AB(RsBr;tcY7x%H$^?Qto-jpN>g&6}3QST!? zbR#)*2_-!*P4zMKN0Jo_aB41W>hw~YHb7HID@;0@ntCaW`T^x}xL6eD7 zuPs;)S`kbdm-1N7g;8i{$XdsR<2?AkLr>jVn{*t!sSlkfKv(4x7CM34v4z^@VwcN; zwKpgemwAm%%Y12wRP8R>&nGKcPWCjCfzA8Wj(*09;jS;@9+@)d70{H&{ZelLOaB2k z={?=F3-t%Edirx)J>tTYqI!N0@D4;!mknb;F-$c+VLH=dseFZuN91^$6jj$0CL+|OgAW)rA|4lFnJ=&K!WR@>VW@j#js znPovWXf{Xe`Ch;^vDY2+2X#yNOztMrk`~kbdyPNSh^-H6+;o-32y1RytJsq);7EBn z!oB$r$G)Zm@IknZw^r^+;`;dcLkt|PCxk8Z7@sTn8B@be+%2iqObh|&7r97R?qk;k zc?Zqi8g2sU$89uY;eaZlgAK}0!< zBLj=roTxkJ#_z!1Gc*gL&@dh=)g^5(-7k+SO8vy+>t2pvKom`O-dZp^qD zKx=wagY%Vair?LN3qIg8{xV;tw+7>vBJLQt@41ecuUn)@)H;NoIEejOt%}n=~EYqF5%WXh(~s1_-$}(+1>+)8~|hrH+kNF5wP! 
zjurHoV5SMh6n`sjf0DdjKnW0n#6F5$pA*2NH(AbhRG)qV^l61%s`^&OS%Jb(0dQAz z7m)+wvlmyufg=(z#?o1w-;;9nK8N29h1!t_B{8;Up;|XBm;R#Ueo*sVLiZgbB15bZ zK+!!HJbv_hd>dAgi-^~)^F6SIh~F4H-tpb)`d!6BQzc1-?C1E2_(?K{^F=6UpEAA; zLY`7TFu;%@7P%BvRw7D8Zh%zSeEq(qT3*_QSXOt`M3|%k@dw=|-5PTefO_+V7rPAS z9CQNOg4uPUfMMdgd{Wu=cZxTYMC*7+9I3YHbw&fk{3Mt3OdDd~3Rm}vEZvhG)d08u zFvY6XE{XJb^KrOt4dGBs`q-dWbYddr@}wA4CmWpe|` zIKnful+=XVRmDTOUaU+#5M#n?*(giKW>O9X^ChMPwc77livdRtpIkUk(E@sTYAUwD zk+xsAWbeIrHmHTZ_jQZZdFsK~2w*?y{Wf2K6jxVrgHU?Ezc+yT$};?} zm%rB(#AUgCkI2!Do+ zmo;&Qp%S1Yz+$mgyg}#wSf-<`bCJ0b&8ndBah4ReCAPG2wVDQlrTw>IdCfFeyJa=Y zo{4*KEMW~qdTIyhz?zZ?ch8f1u;&O%U&^X@{{F}7-T}^i8UXCq7|g~+l?UoAK|}PD zfSq^zV`N~`7)Hkvv*sBoW^g6Z$mnTD|LeBbFT2>6I|3-Cv2T7|ScMB)Out?FW#(0p z&E5p*`jQSj%V&UqTta@J;9{&pr*>oy#eF~a+zuTKXc$&&873+k@pD^D+ARNlJM-cF zS;UnC%pZ`YgxVg^ z);=BwI&oi(hXu<$Mw+Pmaso{g&+Ptj_ginvjevAE!r|||044vb>iPv5wrG*7+7~2_ zbl7sI?7dQGTm-2|=S;ebP9@#mD>O{t!h`?YFW4q90s35y(z-G1_`MD?fm#Yx^b!Bl ztw%9gW761SfM;odZZ;)6Qg%pBIj;GJ35nUe%3PKkT+v_jotNKhaJM=VU~yeBQxF2| zm4gW$7p|ol%iWegWt2zgPqB4$9q%yCb6KqHX9Z9qx>Zix!DD{*0m&1uTA*upO1k2r z>qEnvs*!>y{uxD`JIsP$@(jf;ZzSK_{;s~%dfrAyt zvr_|Xnt8dqbD$U zGlW@!?=gzRcIwMJqQ}|p>An(**U?T!0=X9t&t*McwPR)oW65h>(Q4;3lT0O7L9YZL zjLXkPT2(di{kYn(QH>i6mCb573Z@jE`q}MPk8E~pH{XAMFp^f-eI^RqSQnT$E%@31 zvk1C)`fs~RG6gXW$&^S*;Xgf1JuMF88pYW^y8onOGFMjdx`i12Iw{L}#JACX4I2D& zVrAI=OpTezPHk(w;;GP}u#Ti_DiN3%qzN-; zmjU*LBXkn8r)^`iwZ1jk)C80c{%!);jfzX%Ez?iuW!$axWjwlXS|-dnOYARqy7s0k zAlx&WmFMe#VyE3jPb%-oysM$nWUTmF{JU-Pb_)`r{GxTt|KWIgn78@J!yv@wV|_7- zlVGoAqW=AZI<5;t6jN0>tc3Y&FsVwn<`BssR#uTC4yM4Sw^2|-CH@|F{@Y;NuSxu| zH`Ta$bYp4<^jQZRph|f!JWVkdCN|&8&;r~_L=T<;pin{1H;+JJ@!}o#Q$l83>(m|B zfb2AgqYrDYDr&?Z)vFMZNI5g5`q$=Q*3=VW!>yL1xB&IrG!@36qV4lN%*L7Xi+5FI2bNy(|0{DWusnnEmAt}ca z{_DXzMLNdv2a`>qvVIZ*ww>zNG**Rr4TxQADhc06(MHT#LFR*Ur96kgZ_4?6r#qZ_`TwUje;uq(@DW25E0Ooe>>O$ZtY2(Cg&t4 zuBk(CZX4Nck4J~6hE?G=^x4mHP!B?u=r}_8(!I%Rdy6D!mOp7s46@Yl1DoB*wO?N+ zK+|Eulb{_y2KsjX*e$Yb&lng*S?xdYxFV3Ykds_6pD`c!4A<(F<2b|l0Bk_c;dl0S 
zwY>yZQW#g99THZ%wuB&=t;GWsVK*x)DK%JO-Fsp`RbivLL1(Bcz>2Mv+!M|$^W2u> zUzhSXzl$jP)0gq!@*eg|YJowV)XPn9 z09U$F>bf)Sr0M%b9Q6RlH0*hP|;}) zYM_1gV4y;8OhVut=XxXMzc4i4w$PF?*E%FcE6{^N-S~f*{ei;mx^b?##Mdr#d9_=@ zvTE0p=Z3?qs~3U>2x?bcZOrrR2LD_+HW}=W=r0?rEG_2$w;>q|@L&Yu&Ndx+j`Cz_ z?bLdq=jf5DS6;K^3mi(HOevP3xP=~JOB}K?93!E~_<}PqnuJ+gNt0Mjl6fKQ*PD?a z1z_;&A50UC%!LOaN>Rw4 z%%sni9Nnq6KU3&Y3^{W^YJg>Kn8q5kYxhZRG6?asw6AFgOr86t3oa~S?I1Y3O9wZ- z4W2>Rr=9|>EiDmZbH`c|s3%Ghp zuV?++r1r-3VGf5YNfmXbBQqhJa=f@?c5i*-e%9b|87jxibb|-T4v)D2_nZv&)ae$T zA7d?XaII5~9|HJum+lXoh-rwaRap|sK%KN1UPk*{tGJhHHl%2oqOW+2Gaq4LsnSy@ zcyzDazLIWpS+e%uhNDA+tQFW1B)0dmfR&V!G}ytwA_LifJM(?^XQa(kdlAqVFdy5^ zk@63n;d3i47j^VkbE1PTu-PopU0zminrwFYzU3@=1W#}aPA2E0B42+%KN5LIZ})F{ z?74)XHLW!jj!M#ZTU;uNevf>q1h9L(UH3^fh9#u|`G4O74dhB7RUJi=l|m0-Cy#7? zXu-si`C^5r=&%#0#e{mPJDODjny`yOtelj6BVqOoIrZGB@ zmb=>dYwf-_u+;L(3FfmnEXM~aZO2m%{y1Y6%@u7-f|B2iC~l89CD7UqA0|p#SLR0= zv9F1uuyQAtkf31zojl5RmEz~uNVe|%v@zPJKf})6#K#=;?@Ur+Pm>pv()5~ErXSb#Z}+bL;jIT(Sf zN&c1T^lu``BJ$r;_S!~pC1mw$*FAij-^1QgCw{;S`0@H>z%*}%Pg)AcCSaFyRcpjl zG_)E+EHz7xeMY$%J5CA>g~~mGAN}t9=SJqWG-Az$J=Uh8K7rR6f|9gcJv-NO zBK2`_iX6C(A^vZJnK*BP#sQ2Mt(umJ&-U!F^$ZBDS643$J#;;7HS6V^oSX{f{8r9D zQ(k;TDyT)+;Be}tBEhr2Y_;nNv3ab)c-?WJBmsD!(H7=t=a!>c{;M)SF0)y7cuI8F zgz$nr`9zdv8QV5lart$t^fue%x(?TOui#~e>!e(;#zXSyJx)&FR`or8g+|@DIkwCk|T_4z9C>oH(p}JBgUOjC|H1*~r8VH123- zUh(pnx&H*_w_-Z&0SJ*S7+?s_m~!eOq}R~h-g1Y^vFQ1dXk!2GrAsAf&2^&blv&^B zBC8!;Hi~gAlOHPkt>7g7o^jcgb1x^ia_%Y{bMN1P_CJBClhUGgie)9JtIC{2k5X_s#e^+W60NN^UPoz4Kj%66SMBGN zeuJPK{lyhL=UG9;X-CNM;#_{!uyvS&Pp56v{GZAs=x!&-UCCPJ?- z+rb{!?1LG#diFP!yv*aw(E`$FVf{dN32jR~&Di~Z3VTrQ)k5sQt%n>PTeI!)2D5fu&6RUs^?;&d8O zdB)8&+aoBZWC~T4Ir$_b=gMArfql$v@gMv2{>ao;@aNnSmx^g&e9}rH(dfrSx#@uY zQ%Z@f>%$cHM$7$jwcmfmM1yO#mHmVw2guo4p=0To&;DWERK?*^LhXtb@dj|-My{dRTVo|=9>mMR6Lb<6VoZtrpKsHy!EvzVc6}1U9K<&YX+k}1`V;vYx!(h%GOx_>?Fj`1FQHihjEaqnMkZYSfAj*-V zvB)wsbT%qdZuiaBntR_#0j~cn4xDr)Tv|ji+nwW1IlG?{f!%bX0haOW$6estz4YT% zG|R7lC347o#E7jS>M8*8NuluARIx^L)5qno7oOJ*-A<5ca;ZS>LrKO^0O 
zXC2R$D2cIUvnf4Mf$TS8{Ke%TRLO^{Wx3t@6+q4F0M|LL&=P85*ju>5L0V=*z>mdT@H^eR2bvpE^y!#-Un7G znkh&t1mLb%^>BzKy9rGfkS7yY`K2@)>NBbI*FPpT6yZwn2*EC|mz1_VxU=O=g^QEL zsrpi>0hzS` zvl|1y4Nl)PWr~casRD$>esVydy9Pd#2^xDgbGbh-F(*vCQ5Nimoc?qceHv&xr=r$_ zB1_eJ2xQCoU#T)XyDA~R7D zZZ?62jLLvOyFsfxq3V%p@1mF=*_0SYFF|Sh6X8q=pP25-mppbjYv`djZf{rmdqjqV zv{pxd47zdeHIOQ50?f3us$kT~gg?nU?$!}ZmfoE|8n(&gCx@ogcct9m>uE=C`S!@4 zOQSq8+Op5F3+Q>n@)$Otc=(xqlm;Ld65=kGo5iI*P*C`2I0K!t+?2!r!R%ugv6lzj z9nVp|FEr))K{Tq3mE)R=?M~L*yUTUTnDjX9aO`*I&fM%MZTGRqbv3~oPg7rgAKk2P z;-=&lrnTdRQ-fO?Xh(cRgB@r`oVfd#R%cv8$?>IvhU)9%K%)EI!|SzbVX6=r{Q}@s zqa4z`@b_KJ!p@DX(O7mO7S=;ckWqjIbrmpWj&HK{K)i9PQhmDjLSJGSgGhik4H4Y1 zvJ>_eqiSJf4=QaAeM)-UXtII)vLYtTDszRR`m8mGE*~D>+C~UayP{sN6<1o)>50!R zCOv3fmfdT~EQKC!N4k`npK5E+J$5gbcGl6~dHruRK7XK4vw-+2(74Hn^v*21&phWDxv_y(GsI*;}osfZFT zA*|=>R-a8N#`pRRZ%%d}avza?-tp>egjkBotw_#^aBkXU)_EpFt+{EFlGV1PwtHoR zey%N~B2%sK3)bwgY-$nBA;NTZhj~ZY;7)hHExK`vMs4@b47wruU795>gyL0Hz4s9I z->)n#WB+IS^weJ*8XYtDsCP1DYZme@Pb0Z+V)#2d)|0Z{eo1-P*(>!d@>vkDhbP-=&O(5CQVZ5GoH)wU+?TAPA_G5LQSPZnrg zerR~AApIBO@7~Z zV(g$lHd5w%|4dy^RZQu(|MZ~9Xpv1o$*jdint!JL#P4Vkum+x6I|2&K$Arsh?Z}E# z?2DM&-!lu+SsYnV@PgGqLod4-SYnG$Xn~_W`UGTZestTfHwV3y%AoaFr19)_j)hNGtpmCSuVTZ{zce&|x)W{KSp1SboaWoyuBS!Ro{xPGt8doAk$0(uQWTdKw{ zs2L>w=6)!=7}D6J?<;^0EUt$Q|F_{aLpdsk_INzNgSIJ0hHdE2)tEoAq~HNGccU_u z?m!XfOZvI{tYi9&KLF8?Az@NUUps8%FYmeT`XDJM7Hi6oh-s|9cTU}S()+TXSiRcs zUNj^a3hj|5pFVco{8PTXj_*ix6twe~z&v^dqx=E%9`% zIG8(QRKBZZYj4b;Rw=Qm3oc#)mWb`>lz}#~(v@5-PT9E+VneakTGS(?vA+pst!d0m zJb?^SWIS`h`Iwhu=@^=rx#YnkoS;fB>6RYd+%hdgl7I)>SX53Vz0%9@1U99zs|+Ji zlb&wj6^C4K2oaI~Z64EEKIG}MS30BPvP5)8LjNor(s+DTMGk)y#N@9orM7v^E8lEM zImV=!Fuwwf{h5KUom2AW*DKcD``(H0HHS9Rp3x;TtsesDn>3>1%F$agW&zlom%}I{ z!`N#zlL~ZQ6(V2w`Rvwa9$5|;fUtOQ5hF13SQN0fd$9z_gO7(XYQlG4$*$6Iz+4WWiALvQ zJQ9fOd1CvAH>4YX$WUYGmVufhJFH*``htp>Fvno>czDK~mbp2nrXDbiW;ou1R5y<2fP zfq{(@9_&e1&{^-!H?q{MO}1f{h&wwSH$`YXiBo?PaSmrWcgF z#lL1h?$-Ul31#Rd``{=AIn2|8wx(x&K5UXN%X~1|kYr8~_;b|SyxeGAaS}|wx~3-K z!{W=gEWy{;6|b<0iB@3Z(RfyxJNOnjUnp? 
zmKYz)?Qhac=PRGzge9tjtdk9G;6qhIWyiz%E1H1IY*o`SbPj0XY@*-#9@eiWqkRc} zgi?iVFfICE8{>+Jtv}^f2@`>lttULIV*|y0m`u>V*bYA2lu_FRpM{ew_nf1t?W?1T zyEK=aA!EnEC6x%~o`u`A-K7V%Qx$PPg!r$4G(Zi=g-yrd_diNtw&N6D2Tn|Q(FIgC zO>ECMhq~I*Qcyg49ZL$jIx#bWo$6e>!0qVorr~)yq$-Nzmm zDnlM`2f$H+rZfSF<{p&S(L?po54>Q2qfO1Nm@i4QIPX^Js~@-8&(ibCiJM0CzqqIlptTH3CH8+p{+L^n;G1Ta>xK)BBj8+UiFLYvxs;d>VGZsB zQIS>6UWypR|BOL0Qjp_wm}d(wnz}AUp-@P=e?j;0{{$n1n$xh%X>B!K?<<5q1u7q+ zitlt@aAl)qeQ$VM{)=cK!i$0LFP=byE?i4gqO%8`OthF(Z*n`1U=^Vx03N8)*1Q!r z&ZUZ_XDmo7cL{gyil?L7eXk7#$NrR?2i%_az;5$_qGIRk4{EbRaCkVunw;q~z{7yg zVHAln$G9PwO*UxVQ1k2VxqI-gBh(on(uAm_awl2M+)WaM+M%EzNV=?y8MI%HB0np; z3@#>p{iBO|6{f*Y#j>5!5I48cj=3?)9ygX0C94z=bJ2&q(GPLp^)plOEYRRn0HhTj zd7XNoQw~BOFE)p1jE+AK0;u{z22aXsy4b6!b}^0w>`-OIRDN@gng9s^Ac2ZKRYa=< zYa*1H1cP15rPq#2OzVlEzu$FxCMVkTYvQ(pV~zzJ}$NcI3lB7MK3H%0ZJ z>Ow3i9eonL)QF)FAuOU8YHzOGS)+;bDoZF6m>PM3UcmC$s0S+jsU<=BRQ#bEKara;(yfpyk>N^BA zIdsNtjI(@jo~5=apqNnQQ-*p@y#YqYowD;_81U7mdN3g`YqTzLg|!D@kFuF}w*1R! zcyt{EPhj4dPINDiEU^3xw(W+?*-_?*FbNk+AtC4-vl+5xdpM%Wvdi5EGhp5zi8;vAYX++}Ukvt~zqZK=qmOZ`g@qbQ>uxVspqFN_ z&n;BI?ObD+Wp!JIgZ@1V{_#QMUHWk6&h0UvIzto&$*W*Wyw*X|nSs5)>d~g#kbYRQ zwQnK^eyg5b-|!!!F8+mVQVgax5!8b5mpX2xT)i}hr4ivg?7ZuXQx>giHEd$qN>dq| z-Qb$x@SKow|8&-=^Pxeaf667BfpnLKmSGE!zVR%?`!xeS#@ML~C7yGa5h6{@`R{|- z5@ZCCU$U;AJRAq!M62nOj)g9-cOzd$na2teiPK0R9SDG|Uwd3M2%CQYQ|^B{%vkT? z1?KHqi+?em8j;L@!Qt}K3!m+(s8YF*U(52ywL2*UcUsC@;s!*RZu*J|-+Q88um(!) z;X??Ruxlo5LzI#20Lh4C=UqCG5!7a&=ChSX!5wtD!s2y4FheV?2b@tui^u`HwybHv?yp11r6t>z~we1 z@z7uu15uJ8l!bz6;8~`!1k4N92hQh6%BWfNlvVKzSvXE{Mc_oQ89t? 
z*|a44K2<_zmtNWm8dAMc@W(duuAg!@*o(-N9VYTPQ+p+M42rmXyAK(r+ty+7mj^Bv z>nyvvXi~lc+wn1=OX)C19wYJn1$I_!k&K0X^GH-0Ztx@T#8LWaatp8w8Wmo8W&7l-0YFrShAT7a zF<;aG6$&ekJ6bHYB_4f0%83Wxy(-(z(O~XRqx%%wam$Rr}Oz>8Ivj152US2S$$pg2QRmh>h91 z!P-ctM~L~k(f!hi@vlB+I3#mJgV9|zmwH?J?c3%eB6S=~j7CKf57vn$z06a!KfJ%T z2eL8c;hluYMItm>vdWJEM$4{VsT!%?3m$2~!B3Z#My3VA745j)U!w%JHksR2{H9)m z)<(^Mwt!KX!>(sM1vjymEb9-jYFS*x4?=t^F`|UT5l@~)*#?F!q%N=)ErZWH_590g2=PfiF>BW~Q~nta-^AH35f=RQ_ljP> z$$fmM&7-NJpI&P#D2x-IEte@2d+gS~W?8Wa^4-~@oGOam<|0}2sW#@Z^fAr6 zv;Uy90F9Qmb^g*|c5@il%PP6`XtbJy$(Ne)zHfGM32iEary4MKBV3cchb1!BOOe@2 zMb~SdqV!Q0SdZ6cRKf6y?q8L)8-FE!w>JKwzLR=si6i~YncZj_fUCpyG2+m8Yh&WJ zdQo~5G_q3T_D{K@)98Bwsl21hDU1@^9%j_nb%|OF`V@hdlVzuZR!#3N+jTyu7zs$8 zb)rj~8r_^Khi9N&Hf8aY*@`UnaY>Iq;|JPJb1`x?pYmE^L)xeSr|_4~dPg^1-Y=VC z^lZowd`b57*<@WE`{RM<{)W-RSnE{3;LgaUezV=XM|H2Nv_~Hpl&l-9jgz7ud0fm+ zz8-oC8OFN?R!eTF>u=L{-67azsBZ?u#2(%f09CM=GcC&hU)AWux9HJd>S{IWRG>%}gm`^r?{v%U{h3`nE;bra z3(gJ6eN+!qi}WKc#C>##`o~-7P`6$92N(<|5kSKv(u@?r>A;0c=8EN?!1?iF%dkCfoMm?|os1(0-L@@L5P81V1oedx(9`bZ>sb$=A$2 ziuncQZ5smq^b_A@gL_%?>xCwb%{Gk)|A0w+7$aXe((4KRbk@hJ*0Fhv#0fO>#yCxF zL5@kiJ~!4?{8-_h&%{FcTH9(6%v>yCex^a|xj|^zQA_u&sdv18L)bV1Bh4*X3MbLt zlfHil-mih0Y>{IYgX5TOIMawFEUfVdco_c~er=DY3N`S)xmdczCpLdGeqXM1w9P&L z%3a%tcshtK8t7Lsh=u$?nf{B&u}Ie=t?tFQg@;O-92;C*Pfj*Qlg5OD;u@Z1)qqiE zQzPljc+gc)^+F`V1NQp6MkaohlxGUf)X$tKuiJk9luHQIop)Q1%srlef{GE zjh0%nuM57=Q-p{b)<`);a(3zjgW(L@9%Q?;Xe6^k>E*))VKT!z)jTVH1?ls<7Y9NG zCgxEZdGa^yv%OPC%{%CCPi#$~VRONfuySVS`OV(<@t>a|d$#K`_wMq$a+46u+3FA{UDYO*i z#`t{pwPchhUOI0BHg12U<_f-m4yuB+1iDgimqI$rqiY$IODu^I^Xzkf4b3z)kh0#! z>UPE8K7c-i$cVLp>QWB4gWo=E1L3Ys08xvGhpG|K<|jnjLg^r)HE3)2&VsTX{xd? 
zn|WprhQ0x{Q+Z^pR8%{(R1?qeLV6e2eP8#=9{w`>iQdJH3@mpSAj(vi2G}knC*MJ- zJ2&o29$=G%{rKAT>xY=lYYLUq3hzLijCAqBS!XXYO)5bj0ta$g0e_`y9u4VQG+tu})YL-z zw3p+xluonM1oGb>$_!G5_(6pZ*G{-LNKs+Z&rm6uTLY8kbR#($=!lAjW69rlB6MwW29m4+n!+~qV3X%YDhhZ^rru53O$$vb$Q*jg!sAnjyZ}}QBpblv* zSE*=okwn3VFN07d$>|j-?9Iy1zIrOz1VlV=vt(1M@%6HMG@L3hIto2k)*R;D9KflT zg-T0a)@dI!x?ZC~uq7h*Oy&(|Iy$-5*3|MXQtKO%t~I80LE@qjKjo4y%}14Nk*E0I zsBj8|2=8x7fgKs0a;7xhP&k^Dh7FhiqJam^3h$0r3{t|vvD7O?TYL^q zyRX%q2|sv;+R2$s#X{(uhsk^7*6(OO-k)(5`%&A<(BBpc|J@iMyh54P4ESyC% z#&OlAHQOTNnQ$3yq+l%*38aRLhE%w2)V}D&6tvBvyt@E!yH3(;Mjopjnae!ju>n38 zxT_gGTuc6C*(o=h(#1Os_dXCm(OVe7lSyAZ49*np(KtD?I5tx9n8TjbC0RVJY#0dS zjF|;$vGHzn$ejK8QqfhNisc3$PJK0=v;TM4{}ePouHoLQD`*&cqmV`LLm5>;=GHdE6m z4;?aWY{orF=*GBGQk$6%@ssyz_P|fMryzSWGfFk;_2sh$z$L+=A;KxoO!IB(SYgV|0C7UP&qb@R_TBgi;qUP%a4WEsMkT ziriyLTuH}2PEH0E;%D=QK6|a2SXdPKJ}rxHcSj{38Y69bM-k0nbrS49*-pv7k2x4q zz+e)MtS&ak*0qge$a@h^_`yb$G{IovE0<-~CLI`blK;xKW#Doo8%fyM2qc zlG>SD36oRyH~v|9{atEf(05|~R$s_Y(v=hl`ySyq=dpWQ`uJk=q-(r(ORPV!m8|H{ zA*)2bw^ApMw$K*tGwWFzBVGOS)+42P9v8`U&4l5prOzXDeep~$E5 zf#jxR>x?T^>!AG1XUvn-F|gD+#xmA?_szVTf~I`m#R|oYQf7|Fl=sc58@6Wr@HuwX z>|fYZwLKabS8ds~KJp`qq9vp)tNbu?;+R^kx*$+cxR7`tw!<^aUf^dF^+;(Co_;h3 zvr-lLC>zdpgtQN$-;Rd*EKM2?`!#G|NOtVPpE{3JFhk*du*c}BTp-gd6xI`C>khW< zLMyG$Ygy_#?DI;0P#Gv=E5-L|#@{QY9aL-utR;C&JP8?>_>EuMZt$DC95R@{2!?q$76cl+rz`j8K{QH|qN+MTViv#J^St}WVZ zL^OV}8KuxcjK@??8r}FUIyK7T2>xc6w7J2K@f_pnQIOa2N5XAitYCc~CHJSC zURI|meMM`;Z%tbK{PcA*zSNKny0>ckSg2MF?IpZbIQ_&j+5?pN!r5*ynD(;BD9a=E zXWC{!y_#sn`sD1ewqmqVrN7L39C;p{BgTIZmfQ{|`IYQUd%(5JzqG4@c<6J-sJ6Dn z($R*+=-?SpKdu^{?xi5~JJY6N%%dFjdKQ9{#nHaoTLsUA?AcH8Ra!H{5*~onP@AIQ za(|+a$M!Gv>ak`aNf^mjsrH-t9}h^*rRYG7cA&XAc-eH1{W@{La1s8Z7e+cUU4q;b zr~U#ILw3mQL3sOCmh*8X#RQPEQ@nmida+|6-Sv30^Nmu5wB3v`EL;Ly6y$4uB$s^H z)XaUj$x2I!q{1=>9}XEE0E+aoSmg2`Gkh;#Zoa$yu;xm8yQ!D)+EXzz#`V;XZ^tzu zSm-wTGqZnD;%{s>rhLkTeA4_*1Ul#i`A`0dJE& zoZ<&mJFqPu@?9O#=p?7NhIB3)fUqovVvz9{z#|YXrq+V~=~yWJ8q!yI_lrdx5H0aY 
zF_!x7xbrYks|9v58?fp0jBLUy3j93v-EDnA{;(6?KBibgBAK}6wx@iu#yDxR)&EXS-DiVjaxo*5uQIKJZ=#Cq0#L zC+;5RrhA_5($5JnF)=pXZ`t-cF_%biHERnV=h!6Ux0OF@5w<;$P=`@UPd zTazMoh+ZU>qzA|9FFX7GI}>evxpk!Jj;}Tqr?flX!An?F>riAH+tBKClfEnCh-{hR zz_0eBoo$^3i@=YzLT-y96C&+^l^lz+3iyCiw0v8O(jUTS!AS77tI-#xT`nHY>}uG5 z-x){dyu*n*~U>!dX zowd*W=J0phwtfR@55>Bkddt#BdO?KUIxL!^w`V%iU#uo#3e_)8-BfT6`y*_cPceZ$ zq%UghdZ-4WD67$1sRyfbkJ~%x^x5g_v@K-eSGsiQyI)4yRrP|CC$D=1Bp5`U>1`HX zDXX8ZF>E6rh&JmL?QZvD?|u5AGSZL!f*V;v*)lwvqA}uUKa8Ny+oPoEbyTvly2N9g*25Y3MpGO+qO7v2U@SG>lt#k#fNxl~h_N2nn&GO% zy?aZPXqm!T+1$_Ah+4}%@J*7mHH3*J*ZfLoC#znYDKS6puRDV0BcGi{)icR-oQdM5 z8isG&FACh2?bWm`r~FvgWB06sD|C0}b25u<6NCTOL_BF?R2A!N_ zF~z{fO$6WqIzcL?3-gbWSA~6NO@!LolxvwFXP zqptN%ZE?unxXda71lGsipn+5I7p>iQz5T-mw#?ixX2KDHLTqEXK#8F=p(zVekA;)B zbMet>N%kYEJBownW%&CW(`qx3>eWYwC!5Vb0qICBLHAf104 z4;2>la*|jeS>J5}^ahaTB>UX%@Hj<#W_8(dli$*8yB6qn15laWd)*|~!*PG4&s4{W z@%P?NlPKv5(Q|MqVBsJi(!^$q_F!o=(u3j=u?48X0;hEB;Sa&P@l)SUqvQ#01p3$~ z7qNrb+;RgVi$)^pUk5(JLeqFIwd`>cV964p9Wixr{z$E&Oi&8}hom=ws6dSI(tr2^ zNFVBP6Z=9~lKU)v`7<*GQ$`FI8So421>ZDQ3rQ!1AYe@Ly`OpJun9X9n~ZGxl7VP+YMwcEtqa93ENXXqv;u+1M5j?VmAeh)gFj>sUD z5v^bfMuY1;+A4Q0ETmHDpvjeK2+k}xd*qB>S9In4)zM$S0j_gQBIt`%Q1dxu!jpOw z4ap3|5Eai3AGnS`4toXr7XYz(RhdSSsnIErubGLb&L_&J&qtO9oDun-&hnxaRQZ4eW&WTbhZ-+5 z^_Th`ePc=KmeYcUfuQ6WQ(nT7t_2v6po6}-k-Kk{uQ4ZgMu8CA5&er+JLgEp>mJ^rKP4fwn47e&N%1uZcvuF+QjUwTC>S!z3v3U!xRo)T|ybz z?`x36>F8sWPXnNU?X%ro%qV@>K;fVt>}uy`^ry9d#;T?kr9LTPIz=_wI$!Q_%AMv7+zESk@8a>m$s^bkn;K*{B6|sUf~|3&df4A; z3V-5v;VS$B7|&?P=OQWeXjstwRQWe&OhH>yhB&1uxG(re>vVOT&4_Jia0KRF+k>;i zo3wH)`BlY;PUA~+;t(4QgcY{W9P=cmGrS5jrU;{|JMz6wc|2Xq-7u#7fdHd;X4wYf z${TDd(L)?8W{$O7UZ-O1Oe~@>QP?)v1+l{XRCA&=*Si605x9;0RR-$;x@$`x#;X2{r8 z)l_rOC;iZpA4+NOw8x=kD+U+HvOs$0%*QrAXo*_cV^c7uL1z%7$08RkFV zA9VUO3@=u^{3l>psiouykwR6~ailc_fPN0{Fm^1md>0!wy93z~3LjqaY+VUKW zf|2AYBjBXGV@ZVV#4LLcRVHl%Fm73YW>~vhO&U{(fo;Vy(2S~B25!T1Z1Q6wCfULa z%FVu7VDc1|^Vd~xEVh^0~mo;bmf)N^# z&51bsD2xKV2&Mrac#kevl38ixtdjAd2tA+oeLu#j-fQcPB%)|MXgs 
z_HI$h;pJw^1O&TLa)<)P1b?T`X$i>d(zOjz%E9&qXL9|k4$;FcP}M$671`$f@8o`s z3oJ+~%NT+kXxLqOR9LMx7Y=|#B2h>-^D}bB_v5ugxH=hvE1Io*B0R&&TN{+@GAemc z)%{tgx$M^}t0|C%LjA_Sx}Y%uY=dDhtzcSU26>N302V#E+@Gp};kt_{4#1b#(S<^kh! zLyeirY}!=wsjHP*c{*(%m4r?S1WRJKE~xDv_1qnKkIY_2F^Tg6SY|#kueZ?gp8amn z!;TQLT4fpvRMgwA8wI}i1=MAfYN<}eM%fHx4lNJeq-wN~_(1B3&#zVB^Go9r52S^MN2&aZ17^jTh>G&BS%Fs z2DIJ-vfs3560{sPAbqX@APCpYcp#z58sp~J|JtGHD}Y2|tlv_%m6_oMZtDkRzGJQP z#!$6S{mJ@F{41MSC3#iyt=27tz`=$2#%#Tb5I%_RKqdT-56L+_5(R(^1Gnth6LtTe zrh`N0@&>B$vRhzJVdJ7$!I!-?GL8NzcR^3DX`~*;lE`52n~;?{&-O{M#Q{EF3)`Zy z0gN7sH)7Y(#At*NAU%*ldfc5{G+0is#9~~BY1m0XA`gjjpSL`Z*+6oaO;M+E!;d>n zo}&V2%s@aR<`RHTul#gF0)D1IqA*+@8KA63!kWRjYd|t*3Bah?lhS599PfSvP(&$6C)_ONppTF?{AV^S$uKx^DFMVNfA0dF)WGwJ$**fFLBqv!;P7*O z$!XZ$^==+_Nd&mzSsm!)6{htBdHuGOmr%_4WD7KCya)yoA(FKXFzyJnv!c9S_FE5L z5`_cx;G@c3upkp>sN!!QJrbp0MGIl_nPnk^1@N5Q{-h{xKvchgM3ZGc^-AjiY`A)P z@Cm+$3w9SUic~uVG&hr(u_bw_Ld?dR$8`&^YXaSz=3Ox%folLl!gHuZeHCH;v?18= zf%kQu8iB_X&ic>GsH^!9a4nX=dNzo6*Wqgcv(Qbz0dTwMOBrxG0W6|GY8xAqDnU?V z0^CX2Cy%<%JH>>AqIRwZ(=-^#QPr78>m?Yllz0ZUGP$wWDK!)khA>z1U2Jxkg{)f19lznm-+QD@VsQmqxCEK=vUt8r1_3{A1{l@Qloh~ynlWDwkG0oA z_E{+XlxvT>@SU?7Ovw8?q|_(ROvp+?+Rs9Aw)z=F8mNI1unLjlM?1hRDo|)geCZfb zo9Tkl#evR2b&jI}_BZxso#SQ(0wDX-Ix(OYA-IM(No#p zj433pWj&n?%``XpGM9qb3MKN<7xwS|o|?oW+|&IE6f=VXZ0Uk<=aJ zPU4tZ`hp2PsR_*dy5qJ2{WTqWroA@yUS?vSq0nyq?0qwR@!glc=6i@Bx3R$T>BRu`7g2NA(^4B#zhQe;= z?M;mNK8;2L?I0VZ2o()l^~dSGn0>2LnAl(WM{vKvA_6B*2r}-sLhk!ed1@!_>ici7 zxDBCDMMCez!BV=n?mZvrS^!TkN-7=Crff@fQ8TmLoAt<+kl4C~`2Aka;!1YVh-KF@{-n&%@qWluc1n6% zWc94q3#o}lz)`23LXgaNSc~JqP+{@56=}D9&VzTHANHPMt6!Pz?x}17N?H$HB0*Dr>HRrHf}=oIwx?CQOBGjFj&bvXcOe&2YW#PF2NQYN-@B zi&pEWIs_WwewJ-q|D)f8>I~BP)~I}l4P3V%v%Ao*_|!h(eDzPc$B3ijV3R~gZQ%(s zPt*=uYT0XksJP)Ylf7`SynGdWbHpx%s92Ac#CZ^`HD%%Q$W&$Xy%8EJsOHhvVd;-YF8@VbjRTde${+`R1!)&KcDjA~)9rS~ zCMf;#ZzS6?YBO!3yOW`=E_SiHR|NE}>y0(I@Mh8?`gtU>(@|IUqN&9%7@m}6$N-($ zO(cK^Db6YF+K!)cTUW=nZh#$L7eHFq2iFp=Ah6i^ZPDQb&;23%Z*!YpwrN(BC3|#e z{4p%p9Upi+$nT7_sC-CW4t@6`mo0_$5ot3%2Sl65jDgbA_O 
zM-~E1YlO&20x6>rf63|J4V~kc6MLSN?l|C4av5r&`yr>D5LfktR2Di(x^{jU_-@iV z_i%N4$_Uk>=@+rgD$1*!gWkL{{@_94-S4E5XsSo>zt)yywIDNr7SRg3T_mBkSIT1V zC70LAAN*o@=d>X~B7oUA<2&ExcRr7kc`NEW)Xq~_}Ztn$s?gzmsrb+p0;N*kU?ZI9!LIA1-U-(4w z-0%&gA^RlEB5s2tfw;oLzCP)`9ziQ=M2rWvU6Q2f3!?{#_UL5BI%hMtNRQdX+aE0r z3c)r1lzZE0Ue_0ffKGW1E4-agp9`0-EoOqW1{SFYUePyyIczxy4tL#W&Sl_z@;LdQ zQ#@3_DhI5Fj5(chR53eyt^Gmzy)Bi2h*7j#8QJDXhljhXMQ3Ppvi%Xa(4n!(z`$0C z>&&qa#z|t#2A9vF*2>qG-BR(5Y5W-Q%jPml29rpKa?Gl}L#8AXOtwsc_vJWo3}mQi zQj}I-QuDu17Ubc9_b&%`+Z1AaYv_x30*;{YITGW`Y1T&(*>uTbXh!&#llscNBb=}O z!E6BOWkd>`5tHotk|KV83H!m5Z`p@nk#7bsx!|8mchC=9j`#?zDb~Ne8ONp(Z73%F z7l;AhKl_@i8+cRYH<=nBx#Eikg-@g!1{^PVmggAhlhPczGF-B^rH@UtI#=^jiia}y zSnLSPkMTW_c)gn*`A`^v;1Be;vVNTjjB~wvi;6iqX*kECr{?b2R72QTIJBw12_AFA zeAB~CQ%stXpl2N-!nd-GcG9U=rzb}FxS&Bam-`?yJ6 zvM>2s%th$)EYMY(xnNmkt`m0HKGhVJ8%CW1q7mq3^I+$x%0rG0AUpJKWLIbKNHF~! z_-~#hYPN!V7w})IdYPkY(k(Z~8{1RC`~;AUW*#Y_D;iT25*&L*x~4E_z<(0w>>+S= zfr)tz#8RS@FENm}9M#b=_U&jIkv5)-=Yj1(Z>D2&1%`1WtD08pzGCUj6x;g$eZ%K% z=B6>&g2CuZeZi{zrMsf+oyGM+zLo?D)(u-phuzey3RYyzM@C0o3X4keCC9gE6Ouyn zSB8AtkYi(^;S)=1lSVl?j|*f22Zw*5v|zwmnCaq4j)PTkq6fjXZL=9nBWM!X0(Ew` z^nCHC6=wchr{0+}2K zzER)$zh9GM(Vjj{G1e@Lb{!k6>OGeFk2Qww^$G>n^eR+GAbxsvdp@@Nkb&>!y$GOB z=iJGe598OZx%7Xg;(ch1%|B$9{~J5iMfV)LMH%pk{e|=bgnIRN_j18)h@f+(`L+Ks z)cFRh<0u;@Ons4ln5~!dvHM{Tq+S`~r2^|2+)y9}s*ven+N^{h9z4VE_gTsmI3^Ip zFML$F2@e1KE{>_N$YmvYK+zTjWvGMeswbkaZS}AN&AM}ipq)q&#vnK5Un|tG*0I+0 zavPnmKZN|pPO~Dxfd0cNItIPECV*bK@N7Caw#d^I=28&Xc;ldFygy;|=*lF?C1n62 zzkEh?eN1Sk^5}Ko)g6}d^q2`W-px-Gymfw2H8zp3j6PYLvvo5j=mD$)@4>)jv9V<+ zt-_I|%0;eIhquCQ9IhD=4sK3-cIeZcPh&0Is?nM;<2=v}*a~Cng+9tsTp!7`_=T$7 za>i;lsH#SDSyC9c!LX-;>AKfv^%d)$)hH>&_V~=6DOrdtwH&S($rKP?Aast=#_f@T z@4s*OhfFKPK|Q?oAMRB7OV(F!^0goJ_a;-GfW9W8F_pgVCdY!U;@HlPO4c@den0FL z7-&0eu5n56?R{gLXQ)W((h^nWOTTs}U=kTH-l=i0uBL7l{r4(ZDgoqx30B)Q^?>07 z8TNIW8z~;@!kxU;KEqW?yX`+&h~A9wHxpP*tVeF^a6Q^>oc7DpPLE2(4b{4SXFF9l z&f%omYE<}2&Un74zeQ^>v=;Jk@NbajskTp6Pesq6)qM(C31LTuc)P`};LCAoNfeyc 
z;pYQWAa#;AbeP^AhI1Ec=`EmRjeyV6$RR`@9xUh2_8lB^W^GPIQjI$ixJYBBm@ahH zHk$F!nV6~5Zf4lO4l6(nAL1W<o-#ftf9a z%?7qPM61uT;VO%EY|qf_@0-Q$K~b&> z@;1MbM`#^RNP-T0*fG3CL`Wcasl6Dyz3-?plPs_G2O6I*v9oIT*Lo|waCE8<*WBo# z(uojC^?eKjnw>0HpIGF5p+aoNX-{2H<&^|)$G{AaSYJF9PmJCdBOt~m`@A_hko|$( zAo)OizqUDE(~N*z(7EiVtEPL^Jr6O&@iQ9AMobBZhqtiiDG(hnuRy;@ZjEkE0cC-1 zDfk=Y*kO~Fp+hzpdNcw9_wz7%R(smTx#wub76ODQKp?=e;9Jfglnly82>d1*7H8x1 z;NkM+i`57x-jXkH?tG^u`;ge+Y#y!!pH?e{-MOvx7M7hFvc7mt6L}MOgj# z-G65i8aMJ)!tb}dKL4V^_If(T_m zRg79&TeI(bby#_BkA?vo8_-PNCEXhR*q`orcP#%XP0}s-+^kW{sD;-_J7{k_t($lG z-!3f!hJc=UvdFQz_R43^bIH^R6DbJuNFAs!`8mrg_}kxfWNBvt{UjU zvI^j7uDpbT-q_h3T1 zekoMPrBeI1oIn|C@~7Nci)<6JN>IP$i|!}6=PPEM*CzW6ok7&cyS-cDX@%+4Q-$xV zCvAR)U!qIX*2D={XCaxhCIAL2x~(ty0h(D97v%hxWdb_F%dZoKVE22*Q_t;O_mj(@ zDi{jkO1J}s)^TjvuTGx?gle7?Hk~zCT!Ysi%Fwo$9X|3=`nLHr4Bj4U^dO=5qLF5o z1$lUUBx|_X+Ibyl@$l!kCm8Ip+=OKs3puoA2d45D=EoIKi_N4>)i(!rsGg70J>E2I zbi`K~oV2l~H(5(bVZf#4CxDU&3sx+L@#k;Uscb8XVwpF3h6ZjzOaUNBkkn`|w>j&6yY@zb zw<-?@YSp*}3tL36hNYLwQ&g!!R10*?0|=oNb2!6I`;1Cr^4r9P&Q@mQJL8?g?Z~~0 zU8l2>?A%|pUD)y!fS~wQ-rkUV4zN&TDt}FkGPTzgj{K?d7}7$qZXttWid2qmVan?A zB=K5T4t+3*l`FF-7|*V$F>p0`!CkRNY;qWCD0A>SyP(Y+UzYgx{RA$#NI(rtZIA4DP9=yCJCiLdltp_`^9!6{MOpC*A^@;f~1jkFP# z@tPQ&C*ecs?;RG6Q`EC4>hg4pDbQ=uH-~$yv}V6YF$7Oz6rvR<_{ECSCxpqn- z2=5R`%75od9s+pLwd!#8q4hzZDH$lESdKbJh)CJVmaR5Pf{+45%4F}k)EFv!URpig z5?MBRYiB{W8U{>!lS-28|J>r#zd<}q0{3!QAtq0Hxi$4$h-salk&b0$e%Pn+Wy$%C zDVdp}ptR-%%tL~9Tkp`$y*!8KaR94O$I!%D-LFE6mJupZ6L!7z$!{87Ee7L*ArcRU zqC!s8ZuAORQ}Xbk!r3K}xW52Giy0r&UW-1{r?Vs02qHVcZwlCb) zjI)~mN+avrA{+gXVrNbG{`D~Xny}ZBeyzu~{z?)42gBWpF?R?31gJB|o100d1(3I% zb;_FKDjZ3eR)QEr?c!(GCzC$>yAzyzCT(fJ<-x_jXmii#`>(vFFs&rv_@JD@H%&$o z{blrn^%tx}sD817x2s?FhtrViaeV$Z^^K4eN=lygO5~X+YmZKcy=eVjgC{-5vfW{| z!QSJCDyYBbwBJzlt@#W;7~k^O>Sp*=8SLpsFJUL{{t0HV+j@@E;g;&qH$lZ+x;h+z z=SHoIir9@ym{p`C7)8XYgd9@;dl3fVQR}{nHNUi+AB=`@K~#|9*M$8-+&O zSX8(Uo>swV-N>+40|goD!ecDo?B%@61|K*}jO+Eomf zhBft_M>jBSHzSf;wzAgZ(NlT1<=3?ys&tPH1&_XgJ3oTdl*QQJR%C0Z>|46}i)5xWsV69I0SI^OEk zqnz?f#h3Wy;f;XA}6l 
zG{e2~=ju z@^*^@Bj?wa4L{CKOdvJ5-b;;&^?92_#3DvV9Blh<<(CdWpt2|M=|GK6Rqca3F(uyO z3dE)nOzXdH*E|hCSz}1bI1Jo=FrI=oR_JtKgNo`{h9UWdwL2cZ504_eG&i4W(Ht4$Z!M& z?>b!e<^{wrFy~QdhoQ;@YiH#4>S$eBkCIfOD!3Y^fI8^ralzfRis`-07sOx*&6FYw zh1K`pH&Ny~)u#>DE=PI9t`F_*+Qxl2D#D9ilNoEW*VeP98Y}(>J;@&W?+6qta7()M z41Y-}oDw#Y3nG}*B*D%|=*N$DZ?n*4-~&RV?ZfTxBj)MGy6c>J#KL?KMEAwilJSRg z4J8wAf}15%^J=;}P`5F9$_gubhverlFINaWHNW}fJH7J9p6;ioN-YHhAqCgiHsfW< zpQfM%7v?~1hkY7kxYq+Xk#km%{E{6aN~V;p(!TvyALCp{9@fB+MOtWP0WlZ`gE|&` z@UitVx(}`-yJsV^%p%Ke5V2O%;ClL4T-!wbn^w0*Wh8zY^Ly0Emy0=03LhKJbZ-4L#bFqr0_I*Z97#p z+bltWwfY@j1epfYRqZ*0W>rN+25vvdj-ynf)Hkl(-$X#XL_-Xf>Vm+b!yqS5p%49j zPWj+$@TEezM4SMH6K9E*MOx(|x4F!HGn`Aw%8BW@PF_A&Ty!mqng80cFvRV=+3Cuk zrZV5*P#6TMkU_FeNDYNwgg%v-JI|D#MRE{ve~+O;s;G`?IRH6l{Xq{m|aq2ut~M8gn_!sHFi zBFSJ4^c=qgQ-xUqr{8v-o$IT+{#djqvx3RNz+1Bnlb<7Jic6lH$gQHfDC7F{M4>9l zF~)5V)hQhhnR=-4uy9J(s=&!3ydAf%uBtX;`&Q<>q{@&7rO5(DP(tdoM)6kc{@2e^ zQ1m=$N-QwOXuBw7rhMS^x+E6a}B{q~x_NuIyQf*+po!rvU3Nvgrm`JfnJD$OiY}>Ckh%MT@NK`NyzY3b3B#- zsQ#3o$JURF+|O}eKn0Wi&`rZuw?`^bmm;q`YM)TVqWNpE4D~AGJmiVETd{r45^(1f z2G&oC&!~?3a#K&|;@^S%oIl<4B0PyYeuC08m&|u_l-5NBP4$VZv&AzLQ|c4c+YK~6 z4(2(>qRI9xOm%I_T%{M%g+$5qXOkkvaelor%~%@FgOB<@{b1ibCgzAfEIH<*@l zUK| zF3Q}{wCgbR4*lrRDhBBx4?7)J!TY913&2T8nM?O$H{QUE|v= z3O$=^%8q25%FvOEYlDU<0MGXr{qs8|(EDE^att(P%GI59c5jkzW7`Z1RmIJH+WZI0 zCD)omTj~o|xQ#r_6thU|kE!CeAjU~s$=Wjvx*b+RfIYTcqmx)D=5HijI4`Er)DPZqTtB*P^i6=#VzD~ZvlTQQ z_3Ui;{sO$^UtOr6^wUgN)?$BZ|FW7#O4=r%T2xmn2&MGg=MYm|7Vxdn*`cOKL_1{I z|9S;;0xWx2FW{Kh5mxt@{E%oc`LEpvhs|Kk=bg?G>{%FL)AXS2{rj>nQI*n(Pgh^X zE|ROMkVZ>^fcn?(0DVmYzC~d7#Bkk&0EP~3k8&*z-A#O)!VoDl!L$5BSSz3&nlV9q zos6V$*x{^Zfh>nPQWJP2{g&+stf&tL<19zp|3Q*{*6!|EkcB1|76GO&Hq+BbVjg5F z)Jozx*l*MiBux$W1am^jTW7jPKYr;kgIXsMU26pbfh{-O9@_=Xatdoi=20Hpl>%tF zxKWu0xJ2!KdUCD^J_b`W_WyWc9^OYq7%&)EAI#kXXA+VlmmIm+BQD0Wun5jhRJ7T+ z_71uKBL(|aw9Olehl6859)<<2n^`3`QY&3?6u6^G2}Rxu&Ds_eu=D1iRj8n`{hf6L z+_X)p9me>V(tBoPV2@MAD8F=Gj{+kisP8I<00j~Y-j6H8wi6h)C)2KbY>Y?CP*eU&;9?Mwu41x-xaSR 
z8a#73+5bBV^FF^C;`0umO&&@Z;DnSMc?&L%!hRF$_1wdagS%rMem5;88bk%ERNcNL z!uN|zn<}#*G34_Ijv#Oc+tZZZRI$H$GguUb!TU4p4df9GK)5N!TQ{Ntoeh&P4cmGL zFMYBa=Lq{=&Wh$?B7g8mti`yw27|3y(LX@o#h8I%p>E-k2MuDc4*7n~V}4X-bDsC|QV6z!R~Rkgh7%k+p3zw1g+B z_897QkaovlW;t|fHE~w(fZeJ$+;$n$gTzuHP4B=XNFQ9r9UTf$_|;(M=imu|?prPe z7)Md5oV$5Ir&+T4sB+l1&URQG*|cr?=RlinQt5WFzu3YeuN6Fvw4Fc!FMsIQO>#C3 zXGsU)d=yUU$$xi$$}xDu=&k{OiJdvzk}=H}Um~aBYnUzuKblK;cMoL8s<-PC{A-s9 zlLB(_P~pnQk^EiyRhUR;ws9~lp_M= zzzIQ)-6nX2_Z}Z8)il!BFe`xW1z#lb@CKT7CI zu@y>xbJFuj1m$Lgx7yur0ZleQ;H)mnJQPxHm>URpVLOFT2cm42dejT!THVp5t4+nc z;@L1T?P_-iHV!ph%mJFicth|Xu#hN`G5Vo0NwrWedhSG%|JNB^E5j#mBmu#-!z3u8 zd7YIC(?kzhAooL0{y_qQpZ{ExIYluq{Y#%zb7I~W_8%g;Q3&8YH5UJ`EkIwUz|-zZ z+aey(`w_P7a(UD)sohj>kmc_JCtsYs03FZpG#QCNrTO)lxKzrIZ{1$nHiJ?1O2fpf z{e{71EigLmwRcq1wqR?!J?P#|xi9Y|$A0S~IpYcZ#YSp7{*b>$vdfs|;(3@%W1UA4 zL(${4D?I$MMl7(PiL5<4?@8rz%^Kl8DC?t@2Vg4$1t0CeVylfe*M1Vzt8N29b;6uwPxQt&ov(Rgn7xVYMlk@@Tn=jNNOk zGNvxoLaj)Ko%C`@W8-JztO=Zvfh6|t!2fn{(?)eo6Sc=(DrLbmqFJq!a6Z^IUg2&* z&N>k%U`4YAS-k>hrHjW=r8ge;A%U@H+U9O`Wr~__e&$;NCV{V~8+Py7w_lrW3@(|Kyv;&EmtbBm5|o`iGRBlfvjI3>3U=Jl25x?0JGT zzXhf>iS3oWr@by@>C;;?Go-r6HA0jL*!kX;oq9KdNXh*?`jINGZmPqDTA%4I|FY0L z_jy=IsGgz7HRyjEBPKif=TgVU=M_$5&a^lJFA;ca+qm}2w@(&VNo|36ZR3EyG z;upg>6Me!!F$PL~<#ddEx5k&r@HI6JfnG9KA0J}GR`bN>KL4If5!(sB_%$#<9y!R` zQ|=QnFccQ(ufT-$>)DiGT@SI@KDb}gB-()ti#zwX?3Ktzm`}js)sPO_jIx*Ay6(&R z>zHLRf*Krk<<%)Puijw(E9RvxwJOuLF8x(oV)&7L)6ssS3fy!~_+lO5RK$CzgU7!v zD;-m6{nk5U*lqKnE$m%n&F@1OGhH|ZD5Ae3k= z{7Yw9_N>fQyGf^i88K?_i*cDkvr#6tEV64^CiuSBxJ+XSIr%iQG}o|SC-oa$g{b^- zu`w#Jg%VP(_sQ+!uJoE%d10CG_!B3rDq+wZx9>%fhq_Jc9YI}G!|l9y-*6(?Z}-n2 zJADH+$kKdrpi-@A4U}zD3Qh~8{-&Kh4&=t{w4m^bfeVxn{+W zD;E}gGwKkOzV^WBQ_r^yeGc?_H=a1t<(8SdPKb803WZ8q?F4>2o$aOhC`qp0GwC4e z59T*+r`Vp5$A!syWtskkx%kU#OplM)<%%iFSa7O?Qt*evB6uO>c8z$Fl;^;T`x5n< zC+1-B{2#Dz=!|4_LA1=M*_6)rK*TxR6Mn{{qLxbYj{}YLLvoeA=feMWas?gfM#Bx< zliDwLO6|6FPt~=aO>=6@ov^x5RyVnPuwdE9jq_{U2))yEcN0K#8@T?F-{Q5OEj&EP z6GY=+#*6sX@-9W&0e%(x;8iGs&BH~KOsy?`(_VGCuJsGHCjy&$B{}5?873}4p-hB+ 
z=W~iE+ZPM+O9evS^u%X)&-V*En*|$E9G3Vu z()oyXX05hZ@c3`~Sx7Kdkqt1M6>sV2%-4@Oksu^{jHH}u0Tmh1N1EhN&sny0UvVe% zCw#>zAHqoRP2&`<-t6MZqM3l2lE*jAXrmSzyo$9<4@PiK=&g#Ml1OA}9=3S4(h8ly ztrFu34GNbs-#E3w*)nt-Oyr`S^=O3h+w@&iDdng19JGZ|DH(#QzB=OCPZ8O)m)8jJ zu6b2D_@&?GjxNse_gZiM{_D(q&NHmFcoHI{=EjGXhj@9Ien7?ls+K>dzdFXPEFIkp z&}n;e-0KO1dhm1TVB1oq-C6^FKBQ{<7gauk!bd}RPfQ2~6sC|p!7fAT$gZW-f=k}O zz!#RiBg!`0Iu16HIZVE7P)o->bp4i?IHThD*1nsQ(8C>GvSCAB`=bD#_QIE|CzTlr z!g@$LM~RW3_p;Kc6lZze6BNI|yWk^_?5e&@!zL{Hwy+y4|u2HQx`b6RVi5*4_$&ln1o0h`+RyMrS zzw}`f6MFefkP-c#vz{<)fw9Ni(gqW`SCd~lV?L$=Gwb$5EJuiy=o?~;sG zzmWsel6J(b8L1Bcqb~d?+1SP5a&bem5qVKoQYgq0z5OoK{&zXE;&d%~;N;V}|LIJq zJR|L-Hc$)x49n6?MxLvBcii^%o<(1hpRuO)S_IEn6J-ij=impJg2v3m2DL!15W{L6 zrwv2KYg{3u0TMYQ28`wQai}9|ILq5hH*G?M*WZ?a;OM3(DN`0hbTsnZ$l16@S-x!y z9l9BP*}{y!N*yLcANJNT60{xAItiD@?k%P?mGeqmLeHJZ9p2b^Qbn;eS^jEzX@aDw z&6ba=fL6mggK|D4?3$f(|NOcy;L8t*C*c&9eet;wy8&i}ajB#$>gu7)t?NB!5j7?c z#R-N=+9ZDumvgJ>iJF`*aEt@Tu7?JZJ z=U&S!?p((f>Yi0u-D>d0C0ArzhQEc>Rkx3K>f7C$Dig!u(s3DTRZ}b3cCN*@eZn&I zb?pCKx?MsNDRQ6U8n12(X13Q|?da+#3)}mDzP^1lg%R#ILnl^4!{(`j2Cq-MXe4Cs zb~XW1YdE1AZFr_(b9#DnHMuibe*5!fomDIl5FzJ-QU+QoeWLSEFx^kr`#SK|{RYF9 z)+cu+vBv3&)~Z*m4v6B-c$YeX zLv}#D&{^_={EWK>_lLsiravSuN06L7RuFQ|J>h=tlH1x_%J=bbvKfCFPC^{xIUT?6 z;XBWNx=p4Kd;}f>Dr-_HwPUeH0KXiO1F!UE`%cKpjTM_9Dy+$!qMoN9+#K4cX4Cs= zuDoF!<@iHlWz^YVa%H7a-ujjB)tnWgQt1Zp-|n|Cts${G*LOJ1#(!oiaDw!Yv=B^< zQGsbpifQqv-~9B>X8m?EgNbDfRy;mpsraHz$|n%y&NFF>RBpWNm7<$T`9BfK8=(Fw zg|k+OdZlr#l)M8`UfYf%t7YQCWY@84kzQL!mc74*bcL<*bNCHDBIxGrkPv7fYS zJaJ3=hU|6(X_l3?iv6cAG`wT7{8j1uy&T#z<`V4ViVj(O)A#l5hI8=y^Ly4QfSE~6 zHN)XF=KhS9p8GhkXrF#f_T>V0ME~BJ*lwcOfb?!r30PgKmNTfgp zjW6_GDARqJ+g32590MxMMzdT#p|FSjfLUfa)K(9xB)+NK+U1$i)jKHpa^{4QVrE|3 z*t$qd^*z@2IMtQNC`6DcAftVJ)IwZA6NunM{@fq;ekLy9&ZlCA1rmUIm+r>V;qVqWS$^O5Y&%jPDzmOIQ-M zuvA{3;@I}hLN$Hoa9{@phPaw4b8EydY^3_7-?@E?bd$jEGh0ylgb+DIb=nwS%^bV* ztEHbd2^{uUeijAtscFgW?g8muPSS}43#;`%r?!zdFd8OJL=!A9^b7uX_F3E9g+}K$ z+W|fG;a_&ivf4kH`9lLc;IpjNoM?Kf&*=2D{mXfU3{9AXp^~g0<`i#LzWMu!46FKw 
z@LBT7e$b+qK8}c;?ax!q4$tq}&3P1}^t0p!gHj~qXJ9g@C7*{seVigjVPq|kA69rY zr-}O+F?au-LyD{$ZEiTy?Aws>)RiUbVp+Xk05vNiw+L1FAssumvtqoprj3iGnMQ7{ z??iJyT8{={t=rJ9kkypfo4{@j@~E2HT@d#<-u+JpVs6CpA}S zHelE0dY%FuU|7g~Gn?oAhrDnJOYp7ejf`Bk*=S_v3dpdVNXAy|2Cq_Kj3hBupea(i z0u<2|2OY`f_9ZfQ@foMn-5zc1f`B2k%grd+3@5ExmctqV%|yk&ZdJ z(j?0dQ!C>XUm8Y(MklAh9>bHH{!A2t(r$_1Mk$X5?vbP+siVPIE(2AeKc*6+Y<8QL2%uin0@& zYX&{$ZcEv5dXDOe23N1Fg__gupBuEYM0RrNA}?QFiQVpqnkH$Z@UTc@XZNL{caiehuci`$sjf+$O*7w`3xc zj1yAID&2hxgWN!ad3h!$kO~aLhr(AvR5_)+@vgbiM`D>E-r{pJVXWa0JNu;=J9#^m z?yTt$*I(fr<^V0a+wOku_mg0veiNRVVVL@4s>)JB-%+cp;qFEiM4+L!|=mEms%vAU5%bL^V7`@6hjyJ>UKsw|DSZ=?etlrbp3PjM?bYWqr&O z*tpJ|L6X@o*Yb7>?RLX518QqJj8k*jw8>~Y#-1*TbxFlXvcT+L5$}q3M0bumx$KNP z2pi`hxfOr?tg4()5OrionbxpZU=cAqhZxj(;TqTx$?4G^D^QnDnoT*cE+=}iQRAbj zF2}QrZF>7)1hT!97F{eh?QlIB5!lkCz&@u)jNdFV^;WP|&D4pop9|J1H940g7&)`f zGC}&E3*HVgY8Vxu&1j5R>k=2pNVJooTl#;4Xs}F%9hu)nn12Y{9k1ZNaJ&FaSue1U zdsyc?UM^l0J*9W5P*nsr8uykQ9r`YS@j$e**ky;;Nhhd|6aLG1Qf@kLInxNcJ+S(o z1Mr`%U@kd$6m(5M{EbQC2pT(Mv|=dUUsCO0Mo@Q1BSaNRuvwXQb=mc@JpW z)6N=zYD*AgE-MA~pnmLJ!?a;eH2lu?|JD!qQ7@C*LMq_C?G9ysZW6aj(Zz8X2YZ_z z62B2QGB%QLpbG_he?87Q*Cy1lOZWoZ%?e-7UM`l8YsoME(rToeJ~f}sBNho>oj?JD&WS!K(eWPz7K>*na!1nuj= zR6;FDxrdgIAxz$$?YHVUzA2xWzhW9MFU1tRsg?1I>>CE-%zeo-JO5|j*A zzYE?SgVcXgrL^}E*Q0i^Bm(aw2|Y|{!=0Fi-fgLjxEUckDY6qk3m|f&#mkXzq03du zC(Aj-KO_!<8c8-zuTOmi7eHDkYJ*^yC1ld>Sy9&!#Rbzig5}@kZgy zOSF=OpGcdcx(u=+(wgn;^f#x?jZ;k}nwK%MoOaC9CNq?B@Cvr=7DcruNSR>U{rNv< z$bcHe7UGl0jt$E*3{MPrThf&7J(_=Qf{8L{EEU0a%*^NC*c?dlgos47fO_d(|4OoD zJL~V)b1J)R(yeVCca^Jzd{a|>!T|jMzuCG-wk9Tk-R@IU;!b#JbKS`lqi-ZrIAdM7 zL+C`mRFrR3w3w5DY0wW2EZBVBY0x~jt1-PUB8UnE^-jkg`(>JCCRW_Cl~Ru=Y09h~ z*qy3}QP<2}%QrImNfv2$-i6c|T|nx1bi@gQ`GV4w~<*({!5pI)NQ`k#uR#Le`d z>fUT9*67?+UcE)prHTl0NlofR9)>sq&B4 zj14b{nT(oeeCLr1WT}wKUcUUPdXm?1b4wFn0mzev)QWAEVRCx)n6J_%&Js~{8if$^ zh`veC9`x6`G->G_)|FGldQF4!=v9#nuwR%#L%TK+vOCpAvoeR3y{i7XN(e(gc@tQLE6NNi{#L*$DF*huIe5|TuwVq3~3axGS zay_(h4gh+t8VGoNZ_nlb(boehZ=~+B+Cyl^F^~LZc1Tu-?R77IW#TY+rCL?IzfUB+ 
zN4J0B>7gM^v|)3HDF|;;XA$mvL2ecRZD zV)xHo^~{Q!Yva{%txK79O^e@H{$&&E@f+;)gMhHawr9 zPa#1BWP>s?`=mpmGLl6yT(+pL{*JN5d_5mq!V(|Ly!|;bqME+2tW6FiS7{H05t^}z z;tre|TQ<=HSRB^o-e~5~75Q|U)L$kzQVD?un{_sHCqN`d&Y@#6lpiax|6+-ESStm=8ll5a% z)v(WWb_{>_IADYwY0zL-|4xlL_V2X!_{}L)cKc_ir#|8ioz88gsD~R;xA4q(>H6gQ z>se=hHLz0H>|_`!zS$R?mTKVvGT-8TrAP2AljGkp+MVVB$~WJFoV;e=^IU42d!E5c^NrPKCVf zEd#=nqZS79#%LnLh~->L17Phl(v0Kq?FubaHlu@r4(W zng5!moKPOE4*}SMI_BN}6w78Mj4WBWYZEv4Tv98Ol4>m2X0EoIp1GSC9VW!mm!r3~ z+Nm|;wE$idkoYX8?))41CVT=b_}^0RYS*4JEU$b~uq4emi}_{XRzPA|iR7-0MG1ZL6cc5FcOm_`N*K z_jdtpW#ea!T{>hoRR;|6H}1*q*;#kJGC95jYmX{<(|@SUt=~MjLt=i@k^!j{ zeOBHRfxK5G?jPc6nAlgGY;X9E%2TE^p@^p{en=c3HIHP#++tgdH9lOD`K6jdGUJ0p zoBLul>*}B7kt3%M z6~Cg5m=x~%zo6arA+%^Jw?BHTC7a=&<-viKx>q=O5+B@ab-KRMtqf)sPeA7`IwG`M z26X8rl?b$grXKJhi6B^}YTnweljD716j(^$uTr?pO!UbqRP3FmAc;FQE^T|h&jf!X z8<}#RM(T-@cQR)(U&P$-J!>Gi@$V`?OBhGwIVt2q>rtt>4S zF0Q2k`r-3v$;DP!=_Q>Em1FUO#tg`+yV5&R!VG~TeTGdlDEekpUa^7d#Rxu^h;lk^slLs#|iD(8u|yvXo(+Sn=Q%|L$1QiL%XzTp)LD1 zMZD5}+@7(ZQea11%sXFjBscSW#W;?=Y*-8J#za3MM?+iIMYFiPd zp$n$Xe40z0IOi&A>)*E!h9th{qbL}sUJ`4g2I9nxJ@9udl%g1~NgM@&!G>Hn()RTi zt3`$)qfCd|WC3|6M3!Wgkv zN`y2mswC#wjiUa_3X!J*Ik%MdOETAA4?T@S zL+!dDYU~p^dXM0`J{MZ8>4XK2aj)qCyul(I^6jFSZ z$z@3*6XiOX)QXRP%&!Gq53iiU|!QGwVP(D7)R zqK!q*^{_{f#>dCeMT&16uci5#nVWS4rW(zIwUXc{+RM(!l&g*vx`YLaU(HTM&3K5L z>*9@63NY4J)cMG^k8?r5(m;F6+TgNadqn%~1#`zSH^$Awt!}HptF#<>u&VJj7=b4h zwYdJ`$=6gpTHc@Pndiv4WPWCO-4<6Gx;!9u2S#Kgm9j6NS3^pM{=#4ao4pynfnq*`g1ruR?d+L<=imZ5#BkrY>Mz|$=UOh-r<|e>V8qO zt4Gh1X5jv=^rX38V*L(2%_Kuprx~#m>^b6cxaxbJGhVH=o9im8-V;z}dcXDI#$U9$ zJ5G_Ij+niHq4xBe)|bWi^5Y5SRj91FfSUABaW@BQQ&$=tI+-Gx&gL_}gIWjB!2}Nu zGcXFyl?8Jd-*BDI7%`DG%A@e+hG;@jMoDJT4BsH%@w0 z&qJl5qJ^p08)}+_JbonjvU-`OsR%Q%xMm%{-nwN}c|_a|J9Hi^DL!{{K5Pj_R#+b= zK0{qRS?aF=ZD8x~A#KDy4Hm<#k-Id}^9B|HWY4Ws+mpwvO)JEaJVH10HnyspX^h#N zt%qfc!eNC%$&}9y{TD-5SBph9Cf=n$rR*YJS6MnxKA4nhO#?KeX7XyJRo1v;y9u@i zro7UIfn2aOZQrZy%DE#8-YeKjE%ffONMF476$oh3j--&VCS!`-caT1aZ+El64H7 zbYhSxpy5gB-bHuy1wZQY5+pDYS0{v#;->AGiQGR6wn%Wo4mbx?p0V3E 
zk~&8EwfGTrB(vN=L73PDRXCepn4ZUP!`=#qB`CrA4@JH6*Yg=6hztdJBKX_>Pgc@@ znYoyiCsoq~VniZSA2f>?<)uo$WWzcvhbP5nv#>YSc|OXG7SwBej{er$M4JSj=<*u` zo57w3L<1@=-P#fp&J}i;bVQ8VUyfOK&stTn;L7qNz<=@x={5 zGk}rpt`Yr?DCE(Frh<{acIsNWRsqHpILqz(3>|C4+!gCB&@ZB5S(go6?f<4Hts!VP z3MQSSLxixKtja^mb`)POt=j9@4bgj_(EGjt!TjA+v-BR4#=PB$;2TA!F%?r7tgBRE zu9}GWpJ%%fPM?@?LfZ*|@%0 zg(E&QcrIMM@65?Jil^~b#bDGtlBMfvlViBrFUtpe7?!#SE*vO$(>N-q^%%KZmr&hG|vJK03 zS3D^jOklw!YcQnuhhN$G&G%$Cu~{zavwG%{Qz3NnD*T#OQBvs|Tzcg~7866Fi%|*u zt>S^H$R<3TkxF|NgbN2&GXhZBW+JHLWoL=e+tc`whB5Oinb3=POY@u}60K=*s~EH3 z6`pooN3XF+2(o!Q33JNg1KI>{*9)RNsGHspRbJT>{QqFZU=4-pyVmTVsA+lAhkyPd zaRVGJtT|#LZ0EwhBF|7Zr{kDcFox7l9b0P@X#=~O!5X7+(ym;i=MG_L`$~g`H@nzT zd6NzbMC|iN8AylOUYaMPnl6r_Q}e@)B8|x}|B3Ja#s9yMP{_M2q56yJms4gj9EQve z7qDMOxP+h`M2Vw4f84iMe~V$bYXMvZc;A#7wOx!V!=JZ2XlucasDHPstgMJ$jf^QH zf9TpUOradHk`fQEfF`NePQ3irFlX16W#U6+b+g{ zo1=wr6x5m2n{W=EXy_vgV?0e!B!1_5d!U)&fcx;Jg( za^b$*0mGa#dFEAWO`~z)$ovJ0D|J+Krtsk0<%m<2pTY2qhN%-FS%?+lmNbvBeSgbe z{IiUs7DW6#{4V5IN(^M*l?dQRWHiI+$6OL&KAIe|=ug|uwT8)V8g;Ho5EiAxKg&v` zm7ytVUHHlNnfe&+l3-1iUj2S0LQeFn6Y>|=!&3KiqGvo~W66+)tqyImKT4w(8PSN% zH^z;>QcO4##%t9ln&50=4zmCLQ|+E;^*3w~L*g8$y7q2r z?ca&M(=_Cfct$i5&8E?Y=mbm~bvVm54wC-(xSF^^Vg^J${zvccJOAwcm{$g)J4xA~ z+B&wH;3HPFEuP)qwG;?eKWtIGwZG!i&N|XI;>ky+Oli=9L=bm-Dk@xUE5)37!|cRj z&xo>{`tM)7r&x8B5es%v?b1(+k3Y9QE`zadY($k0Hb}P(wuS!OP`)@bBioAYnIM{i zq;IR*xUU1MG^5EH5^x<7cqVZU!LIM3+A`lrE=pj6qD4|gEV3fEx{{R}%H!ac4T9?zwEzpQdwp6_9jzoYejxAe8^OCepNaayYj;qRB>J9b34?nm(f ztGe*;2`5vS+`@bA=YDm>wr&Y`gc?1z%$+FZTHsAT=nwpD)F0&m6)T2y{Ny?D?~Q;1 zah+YfEtvDJEFo&G%;!+IKU^JW4hL!uFzisx>aE+L35qP>gtaax-|Wda7|s$uNv5zY=sGWKWjWLc1&kW z4SRI&E+4!6pJqLhkt0q}X^INE=vGL|KUX~#LaYeRZF;-4zD~)Zdil;ppPsjQ*?K7< z`8A2l+g5G5SoN_ds^^l$|L$T51S64gD4`!#spFUycd5JVd{h0L(-`9RrIKxHD)P&K z__iV$T55BzU)`NGW+L*cSM{4PHJP3$b)Y+rH0_T#h^S|f#n(mVvO6Jqd&Auya5O&r z{h_grJ<54wOegy1`@2N=ufnyl?|!}vU!|o+8Uh)Fb@b-QxO2Jl+xa*4dGRrPRa(LR zHJu#}!XW{7z#ijyyVCYmS?+ZGL=(ZU9*Lx_ZG7ojNLa}8M_vuqYos`h@dliFP+ZL? 
z83T}O{^0Z{lMsl20Fw<Fpp&B1WC-*CMTY$LtKRETBceo zfD&MfyKc3Y=C;}9(LEu~V^OK20hZ9gw)MHhiS;seh7DV%AZ^ccG)~M#PClr^4 z!MRT*9@&usk+Iru0BrzCTTUsoJUx zXG0nGbzU zF1DM3Zf|8!FIwEOloFto;~rz&#yU28_TFO<@ROh*;*z+nKY=4o^L@c2e)_|G$L=$G zPnVMmtucp#;HBp3%e61m?-a(_q{ryMWhmRF@zG*0hqHpEROg_UCx2_(I~qQkicxQp z%P$rb4~#h=ZV;^+$ss%M|J>MluJrqyZPp+%;BziiP#jT~{KxZYH^rljzBLmF zFo?!Q@<@ZZx2_l6XGF6?ns%8KmpY%^)TB{DF(veTHr1xFq?#+PPygBo?fEtPD1G9i ziqN78c5}YL;r)c`W zbqhc;fSF*UXzv=T&G&_vPsWk7yydM5-=>7u8H?lz8`7ug)={nE zOFA?gBzJvbka4VV=}SklVr(Cl2rodvG>yYp9W1s>&y^2(VpI6pkX<#>5X5-HXi0ra z@AMxfK2Oq)ge@6kE(fPha(QL@eKYO5{vbx~mxRg$3N}pej$50P1Ks(~&eS!3&vCH* z#ZKIo&F|)TJt)1eY+|fd8SWp(vg1r#>o6x^K6qn%%m2EOqs)1|+RVtB6MB4%NV|C? z;?>%@*SBVRg91l`nX$}?9J_&22`@@2vn|h8BzI?Naf9HJ)abbPs=#mRuH+CWmUTnh_t2tk%pLT_FrVTcgPw7*ae^oCWu4H zYHqj92VP9y0(uL@R1~%#$cTY!G~A>q#%NSFRJG@598pmR0pVl@i~1}XabSA;=9oX` zc%Sz}ZD6{GgY#LbQK%DI`!n5xqTWstl{CHK&gb4(Rjc3grMaB4bh5(VfBSH=*Y%o{ zc6a%9v3!~RU~6T3Rrk&-*D4AU34*4-O8@b11d)+Wy7r9$nL_3d9fWQC?R7a4zg&7t z6rrl3v|-|jbq+04F>lnCMrbAb1gC_;<^}XZm8_|x5hsG&R{%hpNp;edGYeH;udgE# z=Dfg&v)K02@aLLhm}%w8%Iaog8ODUZ)J4~}DYmIDZ*gBQC}V0i+KRBh5kYyb)x*^z zdna^J4hG_=adJmN{Sgz^Df0W_ybY_cQrp;Bx zSvYQ7e)C}Y8Y14#&Ti5FMs>kkP1gf(F-+(+$}+>2{4^t zAd;GVf4_I`TUHD%7Y3t@;?xEATZDR!f3=OYmHJOqe4#9iit9EIQuiLP?Aq^D+7gfs z+tm3I=dT;0;h}x2=vhffZXJU_N#$?NhW?o~&XMLE8CMxXSvW`Uijenjxw!w?{;~G0 zV1c-Yi6;>hB<@Wy?T+BDtf^tqc8YswrZ8B6`qnAy2Z1(b<%zHSa;m7VtBuh}0&}-t zj(jMo^q{qtwCnOfYr8{!-EbAYv?L&SrkboQ==Frfn}i5aDD}UFkJ@IdQ3M`^aA&q_ zLU;kD$2mk~Y9zMcmJmL>`%hokY>dWezUx2*X9;}HYvvlMZ8?RZUD3%PJX10B6s*B< z%#lXDA8%!dJ%@>;VF8(Jz5Vgz_`b}pD-V6Sq-!fWOBQaf&2HiURKTCqdC^ioUX`K| zTywS4o^>E!j`%i3T0J}5DKQf$z~6f4$!m|Q@oBl&7G$0Ck@>H`lj&FLnqBq%p>K;k z^|@SyfwntH`&x$rT%Y4w&q8>Gk?r8hY8oSLk+e-7+Tt=<&)OfSCBj1XW2;~gq6ovd z=j=y+y=@(#BEWeU^FV~Dfa5x)2TKZdc_L<^sC>Mgxtx)|aapCnEMxPAyvc8f`za_G znVyuw(DEBuGjvVE{ee zvEIA3=Hu_!Du8%I)KA#$Ak1klF{wG%mxJ!VxLkbXX4TvDkWwXMB&Ke?bClBb3-U;x zI9+vZi_2qBi2%qCb}~R5(^L2*Ls~IP+5IX57tIx;goR>94E|<_jLK;X0{ShN4(jaE 
zzMuzPo8xIVo-bZeth@ePo$?V5gZ@}Do+$H(_bJ-z@s96_*EhR&r+Z3;+!fdQ+Kgqm z*?OYXZ;`NP^g?9YxZ5+S3|y@u)_StBSLYVaKl7UqTi&@!$$!#m zzT(l9T}Xh@xzL$oU1eFLj)o=bd3^N}9U_Y`vobvudbuXX@7VI-m%;84jO~}DOy5kt zaycH@OiS(r&mr2pLi(Z*`WmMub}fCu`G!rw?ThR>8>T3h6CJQ;ZbjWL(YHPEdCXzr z!Y=f%<7Ncx9hhfiiDAS?>&fT~JK=2q8^c_Y?V1{dv4TCnPg%7oXy0$7_rK5^F*eH5 zO&U!6;jc^kGfw+jNlZC89X$L8Lj}TM1MQTXVBexp`T5L2*WZd}FR*Qu_1jBq)i4e8 z%WsS4J`-$0K6j5k{&(r?&<}}|Xk5cj9S(Mo8J(IroJkyV!QKu4Hhv>EN%i*ivtM7a z;W5^%%2JHyzyC+mxA-%?|MBZMCtXB$6jrHJ?jhG%mv5=WPUL=Bs3f=Me%n?FC0Qlp zx=JjGCHL#fZIxKL&3zbV8)j@SyL|e+&+qa3{Q*6i?elrRU$5u&|GWAMX+k+%9{R~s zu5baTS4Vm@S#ib_XjES@jTJfH+4}4F<_4r)_pSPLTg&X(3n369vFZ+U=+<`}10Eg9 z2bcTho~qPNsptq{G%n8~7cqsBNldrj|2v?%IJmEBlq(F1pcRm3b+o?VyetWF8I^ux zF{eP-q!N>H=IGhj@^8!+!7{OsMI&O!Tv(X$PwvaBDK%4zA!|GcTl0Z%lbI<~^99Cm ziN27HdGCHCo#3N2vNXB%w(%AzC3SUWDfwDPwc%`Uo{mYH@}7Octq!EeEiK#PTZqTK z^o)x%EnxDyhgJ)Y`>ME$M?1@7`y5+LyzUg7ldH`R{C+sEr2g;=$AH1Z*|)OX585S6 zO$7qOhCn%-L-9rxf^-bzJ;DA8nz>`dCa6(!OtmUyXo8yU&t8@@1_ zFdCBcLV3yVNP40%gnt|xbwAc5?niJf?&eLWdfXkaGUqC#iR`wC^jhy0PmkjrTY(P2 z577O-%@b+(aooGa6lEjFhkE83&BoK&-uiB~%?v#!ho<6(&7S@|#JooM(B~Kpqf&~m zvK=mlhv*=!#UzzEoIvQ%!H>qFi4H0*tykf4x{FQ_WI$Kilsj)8a4r)22@mtNAf4n` z@7Ej!yVIH#|KiYeT3+LK;BE<5US^-D$lp&brBD46rPPJI-}Q6P$u<37cY+Q*|N z@781YmG@7G87j{iCm4%+FbVqXnvXif{{OuG%R*8>@_^RC%4*&7@AgG#sk7;zoqttF?NQDPV?!J|lnIy>(CzMW+_Lp=6l zU`RrguAQeHIH4daKe$5=s<_UCE>8-_q^B_v|?to)Ln^(HRt}wwz`~ICLoq|e?{Etht z_`XESj>#)MCnp*uc zu}V1Y)Dox5SX8-WWce2wk6dO^*8(OAJ)Z$?FxgEf^Gr-^wKT!gW5?uT*NPD2ckTXE z=Q}+%s)@V93zylE^c{W8595+>vdQ_ewP(2X1i>)-Qf0&~D27lg-2HC{5ul&Ir2|J; zyrLi}KHr6qR|9i`@aSz{gU-9lXy*Pi%vA_mt2VoV<&go^iveu^3!UsJai89En{o0d zS>cS1CrYzJ&nX7*NE(|H+Kx85H#s7{U*X6<`ZDuMtP;fdx6O+z&=86hqrh^ETkj3E zk9qPwz1<^ti3 z=bgBmCbPB5ug@j}k0n?OQ1eOa8#N9tYWdsDp{DfQ}Krbc%7Wc zAE5MQ^Zm9dZ0^+z_~ONJAvRWcgH{;nv7^BC+yP2Ysij8@toH_k)j84;)Z~n&ATntE zZ!tC(J^3Suu3%#pwUGe!}2m>TkcBL2IW>5qs7?TfN(Kbc#nIzh7mp?{Wq*(W~Bvu@T~WfiMojL3OrSSj%)&GW?9Ks@y>17GxcZos0oRv4Su20={9N^O_voL_bXR 
zu!`O=3qmrwFjilLUM)eQ*{@l+z+7D|Ms(uW9{y{>2nW@BYH-A1pf?JrCW0I&uBPKatJMqETbl=fB)|y6P ztUDuD%fIyGnI%WjAne(y`KpRZThx_|U#Bw;{I&U>c!ZH>%DktSXBPUMM8Y0ivlU(Y zE$(o)xMYI>S?gy*?qM`0xx1Y`e8X4uzW?2@D-dbZ_#e-}86yAl{j>JLuj3~{a;W%; z`44P4#E}%GGfuW8IXbj@HdQ9HnEx-Dg)s}0l(ANI6(}M|%|2ZG!|`>1daH4&bKNcz?)}XdV;FP_3;RO3YAH1a|z0uhey+ z{qW;MwUv}?RvZ;|WyuVyKGu|Hz_x3(ogtUm_XF9-XN32P;jzi%HqlpS#N?lSi>pt} zy-5_miTcqyZ1QbK9ri-Y zX=c0Z=qD4IgZj@3*G-iYyK_&d$8PfnX;pq|g zbfwqVm%wEskDM(0J39n)pQ-l%<9_kG%Kx~cX{x}YrbtI^2?k&zJI*gKp7M+L(i>Tn zD=h*medeGa^I}oT6S!7b=BSUDiw_;tgT2BXW;TSaO)r|g)Ihm>`4ZVcn48XfKEY` zOZAP)vbR5HC+=4IO%7dt&`1w#vg*qbRJ14;{T<_zhs|rmh`e8E8+krfyX_{7Fb@T` z9*I4-Nn??g(4aHn>))-jmPyxH&^A_xF!C{3RiC{pXXDpZiA)ss<=vMwMu^^Tk-YWO z>+DrhqN0k#8c(EiuA?vK;(X_h(b_O}TLrnQM_3VwE_8=JemULY_teeUt7(kJ^n>L0 z-J~R8>5fVAj89rkDg6X*VnET{Kqf;vV$TAWQWhpZ)D)+v|O`_Nus#P98}LZ~2upoLL`K z$rr~M=f9ff6}!Ujwf$RgULEVuvs`D5HIjrpt#I9Ja^$-m&78;9TjI?NNK72Gh0Dl> zqb3#ZV_{B;S1fiuV6!>k?Zl6sP9`9^+oQTLrnoCv%!l2>NW#_fj8)y7RKgqaW2(st zX^IGKaV_j9!1kE-XKVEWCN89v`1`z_chjaN|4g4m5Zu90uWM{`PtM72;bw%Zzj2{{*e(oNEOHm!2d*gqXdl ze4de~pj+fRZ2#@EtjOT&L{_;|TYSs-aE!*p-5N>gRm|3X-s;hmOKuTLZj0p0 zafJ4Y1~Lx#iAc7NLZ$h`RpC%0a@E=Ej#h~T)4n@ht)sPZ7{H7TKcq5rlt}RnmSN^k zd~}93fh+h*S7eu|blwQK2j}6;W_LNLOGtNK%oOcz7?LZ^ z3bv_{-l}Ek{#ftf-WA3$Qdtfpf1!ckU(2@N`R@-ly;pk~@JkkX;4e6EoyNm=Q<6TX zZ2DRAm$HfM2XpJphGh%tC%jf<6AG$)dgZBk z9X>7$G5^R_qCTe}J(rQ5tAFn#{S^gwXN@_Dtufdi9@pwn?)+We4k0iPWzeeNGV{zQ z1{W7TP7&rNm0ru-%THl0GR(0)dw8quIuBeB?J)UKmPwvw$|OgqM3QK*hqzCh7lR&GnvwR?2RYGBNSHp;u?_RVIr5$H~p$aZfW*1O}11kjTW$@R6ul~2ABT$h2+UZb?cMbBTN5~ z@5Q>`-OMmvT*;s35RhMJ8Ik%c5opga` z=~!wr?z8&5%H-|u%N0QnAb73ayyV4HD*m*tvA7J?sRXGVVvzT)`R!_v-}#>B5!2!n z>!6^NfNq`8H}?p;*Wsa3HGxsMXj%s!%Y&Cas z1E^D0u*7fc6JJf+x**xP*^r}Yl~rBL^#smF-=J-@g35bTniceFetpqzd#Cw=-WO>V zz*N&{1);0&0u_(mSkj7s`Rl+4%Z3mC=k(KE-i?{p0QmgNK#MJwGHMT^Ben1`F}d*gpJ#6;a2Zq^ z%XPLs-I!0}lAz6u;A;9OW=jHo^{amqh>b-BQLDW{^m1>GNZPwFG%ag?BMP{N{)(xY}CL6OZ138fF^ 
zisD!fKn^$9I3ysHT{%=6jSF40RUWq;^m_@~jj6OeK`Mx2Ku}lx{I2NThPQ&?2Nk{AEtGYQ6zG}KeKu0se3`q>|3+OZt?Tu+ z&2>Tw*^z7J)_1~=+w2=Ht^VC8F|sgOE4%(T?9Q3tI5r$7!Q3?s@xV=BuAf)diy{-M zHX)cSwg%liqCi5vP%Acz2J6sR?jI=_ej?8me)gb(@FV1&SZ40?Nn(xl%)XZ5`YHr? zjuQ0h-x%3RAZVV_+&l&yF@{($BLGGtL6QZ<0SbhZ9=WCQ5|jMu<`#mNixp# zWI$LwYAgc~P6C)W3O3v9Zt0OW#k|g0ef3^mOz_I)h#6~?N;*t(ctDOidO z>PHjdAw&7bhso<$%%q{^{bo;H`tpR|DhiodG#zDo9*nI~5V@wG}hIZ`7j!F5wWEeD*8O2V1gvz1 zD!U)AP6+WVH}umh7c^erzP`q6^n66T-z*|5i3=jhH}NKt)phE+n3LV>C9H6ILXps*0;6oh24I=!C_=u55im+u&T!%6giSQt#pST;!_R=FbBuf6lTwCIG z=f`*4VW^9PK1||+f0cNCswyuxmk?lJiz*o|i7w305fW%XGx;C(S=x||TyAKgk$B!J zWEXyn2{~d~o`PS7(0CWghaBH#^Jj>(<;R!pKjeM&IWAJRaOC{A+=Cf0daQc6fJFfx zkETckx1HgPDYnRv)QewU6NQL5-3Ag{n2vtLto%C2$Bxb$(@R1aw%s&Nxl^7mTI|!p zMGSMGs1**O*KybT^9CpeewjXOo;6=-wRRZaJ6R)I$3^_45OJ+oj7DdPi*3;Y&^C~^ z+8SdobsL*?te1=E#Ga*9abvOS7^gE8;MV}?v8X&%wtd;0O9h{iP$;u|j=V--Id_ab zJuZeAV?^X&`)DWEXcUKl8qXbWoCL98>(T@G%iv|z!ckX0^U`)iM-`WF7#r*Y*m1k>-vM4^RR|a&sKZkX|{Zcn?LAQrc%UP9aKn7MH zLX>vhp=_bC%@RTpjUy#cL)<*;i;6*qlvp3h&UlKg2{i`+|gQ~8&z9iG_UG4eH= z&93u6TDcQ0wbUf5n7$&BCNo5G1E>;=BW@kQZlChm+%0$ZS<|>zn!=F)1T^NPtgWmn zdm9coyF3n4Y8sIvDnn~sIEhbBiitWQoj+KxI73+Jv{-)fq?D2ejwIm@T6cYrTr86I zls+IX-v6l7SaeC1f+%XYXjmcrShi7Uzfh$e>GJ)zm5tA>k3DXUSpkFRIm6X#H$>|z zmrRep;{9c+^Cd-skJ>=J1J06CA{Qmzu~89MN%q}ev#IJH8>`$#e$)9?!0EZ)V=+LY z{||lm{Xy1pa0or`3tzqGg+46{!r;aq(oR-A7oBzgiOfYjs`!WECO;=iXAY~A0`ne_ zanAzklT*LSVl%1ha9jZ{6KJ2-x(g;((3ZQLO$|;qBnF_hfTrIW>$+-5eq4l9@QBPZ zz{D$GfN_ldPbGD56gyD2xP>AVcfo*DYduwVo_f|SI+R(6Yn`dv`sij7{RgEN!tfxB z$SV?I^&-IPC#GiBSY2@is+qHnt|ApcQy7^sp&X%~{0beVr5tXF@4P)pzV}CJP&ea` zR5OZGw5U|7FCYK&*srJA)7DKTB|5AaGE3FZ_fEujgEH*QNSK=1l5)gXCfqbDuglqD zr0{r?&4s906BtBt;bdF+;?Os@XQaf@dUF71&|J}Av`BO6)iEC~HZlJg`o)9~_7+Rk zwDoHzkcK2jPGryskR?Xn`6FeI3N{xGF1TMvIniXaC*cj$`Odv`zA`nYvA=q01p6$I zJ2G^b+%Qjpqbanw?#91*nzJ7Mp4ybybC*N!sphYcF8&h#Lu74|O{X`0`8J%FxHvE1 zaNOq`-|fJUwoD19qM12Z#LqC&_Zs}8$QX?@7XzIvhEC6JsQ4k@7gsU=yF`qqgrpc0 zoFIu#gffQ;P~3b`kbf!b@KR<*By!bpjlvjG{P)e 
z)ec8&W*wQbT#Xxs5rZQo`a`ub#Y%x5#~)&CQ-;gG&cdn6%T$klqOIFAqIN~uw&jGp zh`}Y2)}@b+M-Fc^q^{P%3ehRRw~Ru@Q;aP~iaMF$Lk^V{L!E*FQLNnc{M{4I4wc?{o)00);~L=@rBehAj)Pk?l(R1F?la^+{rOS*=4?@|42N(9>(M)EWtFP;9j5H_ zZ9bVnA_F>M=mYM$M@P`h^kd{f5dt&sg3+#JR)7LK5}jLYGW>c*lK08n8*yURyhPMUrx8aY?OZN-#=1CU*=O^ zpmXl`5KX3JX;rs{e51W+YQ$la;dlVy_lB(D^pcj`ZGKwr+W0I$7t5E6GBJA!zA46QivKfb4mGAvkVxjI8#E5UtY|UfUAqoTk3YA z^>l&h7!6xU7Y=(Q+69?jA>Y25qMsbL%4b`er@&8O5dS_pe4HR}@_CaVsV)68xCLtd(8@wd#D|C;@3?Jo`I`1hv=-An zRVW7#iAQdXEUY9?^bwp~9vgDw_8ppZzESb1{FP^Udu}u_N3<&k7Rw~&`9_R|Uezr7 zVSYaueGe{QDub}`8-li1rspsA4{hGPUgbbTBe?@9xt~p$MJ$xT@mKUVt#^5j1JlD4 z*lnuXB4=2!4U0>6Y0`gua;6dA8{%(s+$cspX(m0r$9{L^kVwNBYyIp)7+UO_7IWmx zb^SRHb=k4+UeigwzN5b!{2J+Yz1}YtJ9bxn+1^;51Ibid5FV9Q(L4Vq8IbUpJ@^j( zSBCPfhMv=XnhUKLG)SZOVj(7rDxg+Z7oC%JeVcO0;)>Ksgv1TXBpK-wHOKp8f0V8P>8KnM8r4I zsWE+8bv5whFS|Hkuc0B%DxKLQ#%YFTN*!R*#AX7C-&BEzMR#$icQJn+v+H+2;HXZ| zB-6!W=;QPyqp-&QbTOYp_I}xC`@`De5=nnEY^lRsVr+64msoNaeO{{~ac-yA0eSXj zinl-|N>EC7x^2|CA2#ZJIEJClQ-{}Zh8kL|`qYh5&)@b;SJbxX9Dpr%!Fefzqz@g( z`qTGTOkiC}k3kZN3zW@SbsiA@%*%DY&a*2NTo%_9_#0;ZjvkZP)8CB!FDRfic0C|J zYv?ut>&a)PrZnY(&X4%X&7Pa5-+7=jBp2jNm`!KKMJp?%33NMOOJUjm^SSwol-EK~ zLPGEVcxP|Fe$>f#xY}$94)kgFusCZlkIHz{u9QPB+1-`ov>{QGXuSlKtb=ZIx0|ME zMTQu};7U(GZXK&+jCvyyDh~N)G-t;nUT{nd^S}zDRs~b@@#)zgW9`Nb@85VtV?p=% zI8Lt4;>4mr?1CRYh+v;-$1Jl=Kn^|6ocAcfulTPkIV<8?nO_0{W6#Q(50cHl^Y6!8 zCAh7%^Y;!*x=j(Cc#iM?SH7mj-YVE0QTAwYwa?@7o&W_ut+29Li{07ZD@0md^107U zQ}n4lM8xIcdUC&iEbet%5cJ~3AePZ|nlXDu+T=@D%*yy2X=bC*j(O&LW)lV?5DwLq ze;7M6iP*$_$OU1N8^%LY@~f|A@$`&+1j+Wun_I@{TQiADaC)Cd70v1};o4{&!36)c zS2&|x4m?tVWL4i-i$g&92e)hNtBsC8=>$tBBgC;Cv}zRbP5UPPtY3CRPwAB9X_J>> ze4zX#k%IkVGY|+huLV43jaW zAu3oo^ji9hFT7m|_Z@Z}^{s3)bzZfI;8{Lw8oa&YJBzb;Ck*Lj%HWGgDnWx&?)5*t zC*8!OAcL7!%|ZDMv(7qhJj1y!jPU_VZo*}b*4&$czuKs;=jRDmQ~Wr(VocUl@2KWB zpW+5kkR_U<)-|tX=T0f&Jk$HFz{p$S%w++>qIAW=up9cvw+{RLMpdHvGq|xip9Nxn zS@r5)G5fD2uU}<=lxd!M-l)c`#OC)H@|g05p12;FJmb|>)!dnio@UqU%WE+V6^VxF zNKmeQ)sY!Rg8w}2=1kgLNIy^EQSp)FJm1GF+|J5?fWVSr;v_aW)^kn6_bn^f&DG%Q 
zj-2UQE;yiBU4D})4v0@zp2dlBjFe*->oGHAYaj)1w^^WFc&@N^hWeVj<}GbJ-%r*m z4cibFVq7Z&S-_WjI2>*&ZI1*wVPmG>8N4s@BPQshcA&6+mVo0}%g^ily)4Tyok)uO zN!ueB_S%Pxpt(k{OX)Kwe(Z8gQKRHasg5;bmLfT`#*=yPu2~*VVgA%0Q1ar)wl?Ut zyJI^2YRd+sB+1sL#jAT-kIH)qo*%D(wg0Ryf z*(E_S+cf-uB((%T#+vRcvistFM7(9q4^^_*U5>Qg3l{MNlNh{!jOX7efU3m|MX>}U zgj|ApF^Hn~hhR%XDB>D$P9di~8ckeOvtp@4qV`3)wKiGjXXg?*iq03}1WgV658UYp z!8~noMTk4m@JFgKReA&hV+SAwCY-BP$fJhCm#U8}S@GaokV7t9S1knrjejs>jV#Zy zelhBW)Ky;PAa7a#PGytaQvO@=~ zZTq!Po;GgBp-Q7A8}^VU18`r1gG9=@A|@;&h^T^eTjh}}K3t_VXlWAm+!C}Med$V0 zPoER8%9dYr7JHT}Jj{Ov+>~=Cj3RFVeD=#+)WYLQr-~@8z<@^z+edsPA6L7AO*rcz z9O1M`8~dy0tU<@{{CTgX3%aQ(cXh!Hb00i43Z%=Op&hV=e0Gb<8Sa8%J+F7*MhryGAO-e`G9H?vU0a~T1)2XM2N)hoUY~|UKZK~RQE*1%AM?DM zM~Yi5_Lszw5}vdT4pzKlB(~{9-6^>_{C#W|s(34Tk5zOT9PGeNUi6RTVS7EcEDt8c?P4RsQ+L$me8xmz9M52st7WPA@o1uf}IHf|yY9aV0B9pt|w2b9#z zM1xuZb<$kp;%BWx0l;~zRFD}}vn2B)a?Vn~05jhtbKY>#&l-r!>gjVPh~K5!Cj;&? z7R4a8qM!V%Ax5|lofv1XVfh4CshhcAljTh4Bahw1Ll3eBC&K!Kr$iV+JmHefZl0BK z+v0-!h9_UVj}R$A&IddK8ro!YDV72s&c&BrnxgGtwtI4n)@u8(G-#; ziCsLE_Bs-E1>gE>HXUuf=>Rf__>}DClPWG>!+(RthnvQ8pH>{xyuQ;-k}qvM8`%;1 zl@%vo(S?T=jar67#shS0`me+MYAQQ_OXE>Qn>dp3B>63L&)iUU^`S9QDd7i54sdqx zO#(!HQuBn16EVKI9^+eFSlJ^fq*Z31zC-EeXYPMPN6G4?m03_!;)PA>KviO>^I_TW zaayXQ!*6%z3Q`QKa*@>jLVRXXoYheEBxtu0>K4jw9e3|5!p@`k{^r$oi#P!%>+53w zeV>U=ZIx5aI!879eUw&}z(qqf{FNhHeJ)v=`+p&K=g(QjBNndl`I+`9YI?#t;mnEn z7p-1~4XNyvO6qciN$btE2(xYp^Y*(B*0KD90+!K_CzCP1cqNKjXi(dB<7~Qv)Zrb{$y_l)o<&Bl^mXV zokBBGvEy%gQ%%qL%4*dbu|!W2MQR>BwXr@UH0}jir|7F1X`-hzI@nJDTj}J@bRJmq z&=Hi6Kw?qOb^Mi_2(a+PYK-GHYmNLie|T#gLqB^#ilPiTsSZ-tj6`S4V;vtsCO1al zEz81|2@i$lX{XXBn=>cN%^iG_f~;LyyguQlgOL1;S!NvZE2QXEuIYZ{Y*kdIBml0x zIN-0XAX_qh6K5uwU!**ISJzZ^NR4`nLhg~~P)O0MqZ$*V6Sd3#J7+8+PvPJt3b39NLBCN-tu+mK4J@FUev{PP;Bed% z?cf-(9v!7dW#D#y@Z`j#*`n-$U9;p0E^%Min+dB;lWDgyWn_BCQzGZrjlbZUK#HVR6 z@Ov7L5YraZlx0?CkoW(KvT|wj#8!fmz7d+_>;(1$PJ@Y_7&}^$M48cjg@UMgJ`+AGd_RW`L zV3tJ&S?`){jS8tBfW)5s$G?DyV3m(ilR z&=o!($Ta%rQB2Yvv|-?}r!E8jxayDClDIFwTl($Nj(~Q@bo%p*!;te|9??1SL2SS) 
zFznoiI>UTsC_i0sR~RhrJ&akfo2SU_l||mV1kr^~{bD$~60IY)GB1jTJfJ}!$V|f+ z#l=eJp%WYlo@IouZ{mTxXIH(-D_hX7fmp|2>sUdwpWygu82pK7fnp@MF4Jgg0<_Dj zm^gfdlBQrI=2HV@P{cF;`sV!uWL?RJgBc@?oRl@@Fs!Soa}jzfmsW7zFXf^%o%NhT zH0Q$-1rfi4W|@x|KTKBk<4^kA%T`MZKI%yRlK4UQaaIHvML9~6n%Hvgc$-d;2aUfh z(GjZZ2l#sDAHIrL*Oi~9Ow;Kkcv0j7+SNY)cc;j;iL?~cu&)e`Ym;vGmqzIhitvJ7 z&J9_)Ts)$?ew&dcVsOSk4oZQe@qw>ax^Q}^w_Xhz)b}$3ko1qwQgJov*M|1Z^THh?%fcJHz$X#jAII1cza%+P12X2AGwm{DwH)jo`PO<@Gie`# zPY|FIlvOV>OHG*f&cIy6s~SIo#kZoY4)OqR(+rniaU*R7=P9-_R!2kj?LTh_5ia*fXd0krPXAIgv3z(DNXZ`4k4RrzukH@Jv(a zA~NAn-$Ex;1QQy#NeIcjJTyITY(%h;+)0KRD{P&h?eJnve5g}4%0el;tQq}o_}Fh( z+_JE|`fclN+|?Vgm+VeX{IvI{N%E>T4md1+*R;ZN>Z~Ot_7&xHYWd=}50>88G2{zv zonT9Nu(U{pM8$V{?W(RNds-F-bu39|KbI937ArSz5z%4 z|FW?Op&*#R10>E!X}45)QtT0tRLZxmpG)iE3>T)msxi=!-@JKgyq#FeFueZu$I2zg z2)1A(;q;NzE-{(94x$8Ju$!(~!VzPD7GpMADSI8$19T49TT<5qlb@7dFn`HnO%Rgk znPhKgk?2x;G&0e*_=7196gymrUFryi{2Sp2>N01j%79zDjyw7hRo)@t zg87!@SR!7)k--1O3FX>*>-dVPrF&A|(z`_N4zG(hk4Z7w=sB#<>9Y#96m_SsI zW28eI%bE=ELDE~|sHAgX@S${J`wpKut#%khm^T26rQ90njrr*XXydj$@2LYD4hWP} zB$TQsii%%zmtQbt^SzQw4sY;4c5n%Uveu^47r6y^{Nxp6IWc2;E$Kv)yg{3$2Ff`S zWp=cL))!VbO=*q5s`JwCt!g}KTNP?Gjmao7q+csi)JtLrPfKEfVcc)KwoG>5#f8mM zHyC-f43&0clH6Lqy-$H%UI`R%G3myK8qFqCYNHBSXlLh^P9Y=GV_*tP znDlG!EIJznS!G$1{)V#N7qlq_Qxa?XdjyRNjf2{RJi99 zU@+Z6F2VS^SNchu)v*~7brv3??N67G5PUd=h-V2V!_ieb~cSxQzu!(8qy**BD$ICg}bJ2zk9`;5u8(MvlJd~r$`&V8Qa7*QN6 zOVjkv&-r#VJ5cV|nJ@ekXQ)_t^{UN#xHkv5g|MVS&_@`FY?19s*%cqW=b5JVuK2P2 zCsNKOwBdX~!sEoJL59;u`?8Y}PFEaH_HclrA>1Oh1)+E%QUQ>WzKan%rX!W91}Mk2 zj~)sdKfxe>x4OlvspJV>i;)og(0vhD&yTSCj|@?GfXE$JZ$~{?GJs4MUslz5|Fz$t zawGd!9{fIdW))e_J+uuYckfxsTz( z&69_<3T4RZpdM6CV?AH;>5tTP!edlk&>~WOunkVkvAq{qDCR%L|*a}!6UxZ?O$pEX*1%gxNa z8asPhyvmIIks^;1Lz8!Fy)R8xSQ`m^&GmalAQmC2CDQpB?M<4ws@YQ>EyX_FtFn1~ zu~ds<&`y!Vh4=>!P@n3D((6t%5?gKphj^4amm7&gs8S#J1 zGK^&iZy2D(5V>UbR4>PwVs4!>*>r?5p{JB?GSQo^BtglalaEI^G*5i`&5|`8c7Tpf z_u-PweO;5+xJk=4`jepZC8bN>N8xd#z2J(0gA&Gd8!+D3_ul>e^hI@Xy+2EA8Roku zr#~1gAhOL;g>sERB_ytoAWnY}Ao77Z#?WMC=q&xZfV 
z@9P|1Vvun~@HNHp{wUi>W09hf{iXV*4E`nfsbRj^XGIX#Z#mH3+27Y#O1oK7JxmPm zvY6D}sRwnz$;3&7roEbCYAQV$YaRjnQGz@`Nr&qVD?d+Vb%9~|`PC!C=`yNK`8_lt zt=G=_f{;t*EJs*O=G>eEw40$YRP<2!I`*$79U$s@I&y*h2{#|O7j@P*_3a?ln5xJXKzSp-Z#R7U0!URFNJi!SuE417 zFMjeJK+74=Ebi^fgKCXsl;-r3zR>tJ^N*Y?QOk6`SOY3Q_$c+-gnD6?I89=RNaHD9 zgjPa*@8li-s1p#aOw2}%eg7{eQfHaf2|t=`49h$Kb_@!h-FSEbL4q-PyHj62{Ua3| zjCc)^3FZvL2A7u~GpN1e*zO$oyJ9o-52A)D+PBos?4J^@9TdH40%C3vqw3UJ+{Fv$NwpEF#;XuTj5$;<&SWG3aCK_X40gox2|a$Ke_ zS+XSW$4))CD}xK7Rv|PU+TvT&SFgI~{e5l;q_?{9SolIrgL~qCl5J3U7h})W7{uv7 zl{kx@oZ8w6x{!s<#V^d+KDBqq>Gmy-&5DlYdcW;&aE&M$*_`}uiwIwyqMs5%tp>dM z;!G(CJ1`s*gN+1AqHX7<5Df^GqB3#Z*ReEzg z(MB=)@`L=sv3hREe&@<%&+I%YVenmfv0hf~6K=TaxBF_#X4oqe7Ig}a z%ZoUkJF_};j{u?LHklgV-C>b*+kl0A@4f9n#}VB|zF{~dwYLqQ`rp->e^JwVe^*6EXc zWQy-F!_{bQ!a6J7hiUl*cTd<4`42i5eQ z@$-E-xzT$@PF4aq3EHKidXi7jfMKWykg{qfGN>#j|$A?%x@-(`5mE!HX7i->7nJTfM z5~voE=q0}eL?v(+EpvGPH5Zy?-YqqQuoK8&h?=D+?g57>mAEW!3mjxWTi1nPAbg23 zowa-nI(B; zU6*rnT;aso%#OWW6#4FuMWa58~L z7VLRfeD!72!E4J<@vEyvBDP2KW5dS&>=hhPWC`d~%BvLr(AA=n$;AQSwFP0P;1{IE z?i*hSg8I~#1t5+Dk$!&XWrR*^2snV{d8O==$-W}~CnDS*jtHOvgPOc3%c%0=M@9{Z z4$q$7o*W94UU4AH(XC{PoWY{3dGo~YUpT*IBADb^%!%CArie9=%LQeEkf)M&MZaD~ zQP1Wg15}ZBeAp=7JlJLaNS#5oqUNhL%dx)+JD?8ApkV%7Pp!Bdikz2}-`NQ*1PKKo zj7=I+TO>_xT_LAOA$$=@2`~X_SeWSXiXDxoAxbskwyFAu6cs*h4DTUk)6Uf;d)s)l z$0T+d2s_TZqlR9HqcF;1P!^y)|4y<_` zyGc$^0_~R)9I!rn6kPe=`yOWHYPh3^yFvG~rSyf$I~5ob_kGwl9jo3jxH%*YkyWi( zofm@i2?0m^kDozZV(jpcfU+)_!wI1xpFO!HOc` zScLewNWs>0>XfHc*ZGV$dnB7(Z`(Fb8IPZm8-u_vD|asoiv1%se*bDG$Xi0qEBTto zN{XA|kzeT%6dQ*6^R5_2fzbpqv3jWQ=i&1P>gLiLq!qa|{#;a-6lp1f<(|$!kLl0c zeRFw?e()O);euPJyIJDe5tul@li%t2*+{9kSa1S1IHYOwM~Z7a-1L;rC)=g{Ho$+5 zx^Z4}Kzy+acTjSMgzVv4TcUzaEVM5vE}F{KP|Ra6mJ=S+uVat9UIhIyM!DSo)N_C= z?{3@zKe|=1q2}$5aQ3DZDDgeDrVaOx&tSW}dcGq8meh$R@HLwMNPV6_GG#@r{@RDi zh4IB;rG1^85%<=Vzi z#S%_AxevD$!<<>q3rOiyakEk1&2gv}HFjG~6W8~U{Exn{9+1K|V)LMca*v^lztSkv zz)ME@1lbQZT>WX`2?3Kb&aH222WnIZGU|!q71Dm)Z^Wnhjxd{4kCrWeq!xTV)ABTI 
zX2)9?Hx)nVLru#w)ZOdowFkXE;PPrAd131J2|6`8(wldwZl;(p37+jy>jHUb~cQ@mQ!>cTPY~=TZmP*Tsg0K!78mk4pr;GBD>cWXM0>)6}Qni#dW7^-SEhPrar>ZkQ|Plt$jhwjP+ZUKMz ze(Sj_p04yI08W0r`H7b5zn7rb(JP+|BULhrYS_$~k)GWL;?{djFxV zQQFsE$(u{}AgHR1Ur8ptu_lw5+$-UxM!ER}!9TaumJ|`FLVc9wot>wvly2>;sFgG` zKAC(0N@};f9o<5jm2;jr+Zoq8oKhtTmNAz{uSL@ar=!}B?v@Ni-n8#v)8`Q`l`8>< zyKUVKtilQtM{f*5M{}YkEVppnK@nNg=boSs!t%OSN+{p0-U3o8y^TFsuA_@z?N3~V z&M>4)O_|kh$sp!-arF^5Nb3~G-HUsFo>t4SsBOD_C$)3wQOayKWFfx>)VM`LrsAod zi(@yi7u)4dcf~M>#>|`*N+Tu_LDRjXnB|1KlM8(&Xr>L-xmY*|?+A?sBpY+a^reaz z$0lw%Jgr4vMLD(}35W=WzNqG=sOz8&lkvG;n8^41|90^*iM5dTBap@b8`I5pOs`HCGt& zKhOQ|9qxEg8c8=A=1QwvRzCN>w|%;Pr*Ea}0s694OhBJbZhA(H!hyQRzQa$oZ|Fri zN>2kk@c$;P)V-gssI2fa%;;2uT)iKJya?fQJMFp|pLe=tKS~tdvkdCxQO*op@1mz6DGPayMK%TYXY4|cA1xuKK>=_rvUg`|SDOFX{C(Kwr~92X4v(d< z7KHUE6#U00RCCa>tkd>S)j>36^mEGDkhrz}#M~J)RCTf5N0ITKQpUHayvOTM&hyE_ zF^5&^1T5MTwZFun+m+wZHq7D%p3kbLdNVv#sq$>D7X6AU8uUnqI+++ z!!krxK^EhD=Fe#JsXKgkB4^qvKg-E&R+0*vjpEV=XfH=+ZD)mz(@!ilL)z}1exf9@ z43wE*>*wriTWo49iaa*bQTRRc$lntE;gJ*xqQKXMosp68`2CL~>2`WGqSJrxc+SRk zMa?vwGznDSoa1BibYIcUL1LeK7L8+kv!t}d&ulfK#~s;Okjl_?UcqHed;n&LY|Ab4 z;qQ1(w}+irevL3!c8T|pLCi7Dk3jZWhgNwq*~$4I+jGfh=HFS zwk><%?eDn}EnEfzrmGD6sUI;VKx=-gK57F+DwOdn9LQA7b0%x^3%Xb<$K{0$JhqH6 z>=o#Gsq7e??QT)-Um}bw(}_0ZjOBTko-fm{-v54M?|05xhwE8OVvo|TJUWURm5UFz z_7D?F^8XK0ZyJ|W`u>kMpJsB}bjnOEm6@!pv`o!iWU47MOUBA2H!3r8%gB9!Q(0PS zP?<}nP??e&WbPZ38!zW*oxr@eYndN}ueUGMdce`N5~?I!b@ zQ#ItVwMD9l?U5lYNxc+WDfPX`2YFQreO6n+f`$qB1 z{0ma6(6>>HBAfC{+{TX3jxl;aDE@kJ)ihzEOBy+6E`yKpX*u@q ztNYjCyBr-F0z^+b^${lh0ktTw}HeMi-qr04Qy)u(KTf7RpS{Hj%6 zTXPYodc>`QJ?}Q_coouz)o$AgacD*I?TzoNIyOdp)_W3pB%%)Jb9M*ecn(`H4_-&Q z|H)`;WOC7kvyFKIkATY8m9t*)Ur87#zh_)gcgshamntg@$}IEpcGegwb=JOFe(h1r2tCjRkNBQH98Ga>JFXPk5 z8KeC+p-d>${h2Ha?T9R2+C6gVu>jBDmzLG3tWec7JO4*T zPBT)XNy2rek(h6Dg)35&tl;UMJK`+f9Q(IpiNVy8arEA$C{IWpPXar9Vu@G%jMuM8 zzu1pGUQe2D!$v3kyBtL;81Fdp_U=nyhWaCIP3sF^)CF9Rv3;EXFa;Vag0A`)6gRU2 z;tdOc28W7rC2vB{9;cdT^s8Cv;UM#Vu&7~JW^9anwzBr*us`+t_0P7Gwd#>@bh?P) 
zJ;A}*aNjJqb?3dm8Ks$Z0dIBuzG<8vi{PNe&DCoRaOOA}J@=yetM3mVjh=@FiCmcN zazSnny=ZRb)Z@^7jN_lhx{NufPa%(VD5jBIW*o#38@Uf;Ay-4r&SZh-sl%gSOIi_U zB@WtN%4N8N*T|(%HVW#;An{ZLRY>Yfpj6@wl@1(YOAT~hXCD0K`IvvJc(dvD$tL#T; zKqMs?a4Sz+Ih_{|Nu3p^t%^?{q{PEoamr7CqwBDJ`<}W_ z9HOs^?uR>wFI~|#-`~E-kLzAoy2S4x`gB!MSM!D3*qM3^CgS`2gW^kg z+n?J0NzvjB$Pt;o)?_P#A(3a3&Qk*xX$*bXbhym5Mf0Ua0=r>^&BU?4m5lFwHhLWp z)095)xZFtcmjowheclq?e)hr)m29&rPsVaPKp0Ej|7sc(LYFIFDpJB1&K~0EdGX@& zOp*_ao8)Cpc8#e;h>YHI+#)!YHXT8EjfE}9qLYstu6iK6R)04_*Lp&Edfn$wYYg*= z%z-z4o6`y73aGD&9E%_r7WJ6VmjU0&IDK6cCW(Z2tqe}sAC3Qtj;JuIChVe`CbHFc zDo2_^az>^bMfS+Yrh_-KcMaYQb_lspkGkAqextz_RmP{%*Yu5^XMY(Pyo(;3_7l@x z+8xTU9$+uxcYj)4znE9zSeSr$`z!46C;fYD$Twf2>!30h17UMz32ga4yy2Z06Y=?x z(Ho&uVdymu;N@}$XybTo$;o|F)GeJyUyhwK-=YWG_&AIgzyH5=v-(ha^eR~D%Rl#jlEZNa{sZIc%3H|Ha z?q0%}Nd7QG9RoXMtIx_LJJtQ7w-g?fUEglT8l2#a^ip#aaRP}DdY}dF_jA=-yj3P= zPp04QGBW5ZnFK1w4YyVstEW=;O+i1sB+{UL`~btMq;(5dJr6Z<1q}>kdmH{F+mC40 zpq^H`oWeF^&uI9TUIJE_&__lk+T_^-uYnGPP`QvF4VDN?Tu*aqx3UdQc=8d)YK|Gn~vS1FJZsQHR`Ru4kUUiY}> zqebzfd`K1888CuyR$0G1KkA2HUd=RvWoYV4MKV7gvk8FEUBPeRowPRU;m+E0Wqoxp z@kTZU`VP2hYNWJb+-Jn7VyKA;HmAD&$!^PftD4>wTsvsv!8JCUNdJ7!uH;yiiuqb;RGj*lvR6HK8htCjmT`uM`VeA zLOM@Q?t5UeKU@S;#tq{1rJxFa27-YedYD{?)x#n5Y=AOao@<*myRF2tT5~g>HmeD~ zL3U=^xD54;yDu^Bl#RT`S4Sv;l~w|(5I9$sxovO)%q6ZE*5DmKrl#-E zYFB|hZMO}VvL>ume*CxYoAB(h^5rQzH&;IN9oJkS4f)wQ#~?^P8p^7mIOT=s|V zaIvD~?C>8S*O`cTl5jk43B~l?uGCqfZCSYXL8BZsTp9e|y0xMe>6-2}vo!M~$W@Ga z4PY!3Y5TwR^aP4=>NVegk>Zyjn!NwH@fepP{b3`=QZqs!xLC9Uaa;^0(LR_ql8nFI zhP)D=u3{dluPh8IPVd1ESD~6lg^cZut znVJ-H(v`wdA{vvY{GSkLe^lI^y*0?C6+y=+Me6qIQ?X%toX1bNkQnBA{?!eIm zwB(?d+y7}WEgra#$fbxWr1RxY?lyXtzyF+jy`Io7pE8K@|e#_aB zDUGhBg7cnT4Gq40FTtQ9qfgWn@371w#s^!zu$stK^KB%qs1(g6m>+Iwku$S3tzFu! 
z7trl73~$yC%_mr`o8aE%aZ=&e;4SJ-VcAEl44&~mP3R>G|GWa;iOKp@kG~K5;f~&b z|KC`F3X^JWU|{{}D|Zg~0vp~piTXq0@G=JTpnaA$@=`T6^O}9hOAHi=27@;@S`I*T z@>#1=XU*+|U6L%IWU4Icr9A5wNl6A8r>1*fM&7;tCFU0NEJNc5qiS0C*n!2&S6UW+ zU3liNkdGRW_22RM#|8Q|vp|42nbTEMp9BkIK_?pl;}spZ?;NLwE1Cy2K<{sPj{Uug zG9v1bdBMd|sb!S{8Z~^*EFPh?5L<96D^R6uCpM9%+-jhJ)@{UCk>N*^Cl=4$p#y<6 z6}K(TPtRliT7Ts~j@8bM90eZuqBJ}kj55AA|2-Z=Jk=4GjsUh0WxfUAu(<?>-%}j-{9vI1Nc~oa7zFk5L}u2n}mzh2cCfgUdyx7Tmy*I zwBnCmqlVc+mwo5v#*|6|cm`jfSJmpW?`|N2IPO!bFTdX=aF-!W#F{_?JDGuXI|A^tBbTxCv30hX7l!p(*PAU-E2|G(xR-=@Xy5%w@dp z^GmivrzJFU$g0qHM*hnswg>bS95)(#CE8hFVwTbwCro%Om4Sj67h7+XXj{yE8;HrQ zJUieS?R08z75LzP(Xu3`&4~xNIr@|s9`Vu4t6CVTTje%05_jI{);;O5H)jZv8xG zibMirlLf;;YNy4vQ8X^uYBC%Lf%E-2(1}OOkG>0YfZ{k(6AM1u89(K|ycPR&GW?wT zjvKY-ECO@5gQd)kNin4i^^`W_GDyz&7^zI(DE2;7RSIYb@H+${5Rx)$V)6H`FKX5{)>G`?<_Y_I zBQcxZQnVw+ZftJYoC?SyKL4uM_{SL@;W&dt{n~kW4k;o#ul9X|OX!7&Zs;nMfqs^g zKPbBVtM0!%TtzGnQf$d%RrWQ>!qU^~+njH=A%eyD(n<7YhlfFqg?Vcp@1Bbg>JhZT z)X%VrtS6Cp^_X+g;iv+u+Nz`}Hq1}ofOL-gOf07Slm3h89d|qB?YIE@#2N`}&K;-F zS1k(o^Tpzyw~^&M78ywc3KCqWLe+oNWF2k)Wm8;uT6niw4SMt$4<#Iv|4qf|xU_g< zQaPO)4svRPsG1ued|TDA&v~M*} zQAG>5Rd$#WZ{E4=vBh`*?fM*fs=tBl(>;{ww}OI&mYN2^wOol!ymh!q6)TNB2WQUwfpw<(ykE8 z9iAym1Sdb=C1iu$W5v_*>jYcq44w;n8)}&RRuwq%laDM1dpLOO8WpL}lH6ANeD&{w!-!y-B(N~|PV60w%@E!Ui zc6din@vae`*Hk?~47>R8`>~yU98+Ptvze5;!h>Al;a(8lT4Am>u0+1qQ$+CRak;D> zP^YRQ%ONQkRWb4k4`J&bqE#-bCS4*491nQ$ak0@`6Pl2qQO{je8h8m`$$cV6Adx7E zft#V)#lbs2vtHz6Lzzf{w{l85KWqHw1cCJIDRQrj!z=*2f#tvPJ9XnH=n$v+OUL6k z%S(@rDdR?}5miFIh{5TlJ%3{VT^e@mbsp^?hpojaViW);P|L@WsTPDCl2?NG&t|@{ z##8vMfonOK5};$UXQWz%q(4p#z#Zsqy$vFUgT)c$mP^4r5=G2}&bUK7LU+WW>_FcYfwyY7k%BzbOtwT%J z6Y^z3G5O*C?dvn70JMA~rrm{?tbf8q!x5!+K z1z1?`D@*84?C-yVU=%1IWv%4@78dF^;(6D42;()>UOQs3MX&&A-5C=1$1wxPXScCN z!IR5#w(gbV?VD~ReDi=7-aLPjVEXircdPnRJcTqH{I#pVxR}NRVSRGkHoV1EBo~?n ze3M<4b97LR)LPz;*m-5}ycI2309FZ+pQV?SAH#&7RcLW*;Za)>2)2Y4;nMslXhn%m z=5}QD%0=^U<&0OHAX5tQ+GKsDJ6ED#pso&W0}y($iG((m@}OsJr?iGDI}euprcJ*% zqDv#`C9n!=_>ZvaTOwM 
zrJ2kX!AFiq(NQZHD4x*WfpsW6fg+28nWc5itg5E3+0x=cfT8gUZ3OpI0t^nGGVsPG zIR}cJVMC&T{&`QgIACNNs#{^en+z+~vhm>{3WhMU`yg0lTFJlUc;RiF%x)tXl(3Gn zi_Ztt4P-=vc^Tl`qT3avxpTvAya9BZf>;{k-bKgPUB)!$A%(y-xdr0NzLsTQBJDyo zt?G>mV?z_hp-HF^K?xJ`F8;27guao#H4Z8fEqExiBhbzo;`M>o|7S*`)Rm4rG5&^M z1F|?EJeD!(^Y1L4J{zvQLsSA_BW{1Zmn~5|fTBV_jnwYauy0nt4q|=I%^PdlcHv;W zs$|0L#k2EjTF@sVh$}L;5JI#NBt|PZ%Wrem>RA3tY_yd?kg~fI>0n(t>h~#TOw)oIs#KTOm`Ny(j%^{RBXq!3Qmp(u$nD zm+jqkTN-<&lb5>5j&`FhKlRpefNWtS5Xl-5L(^&*ET|$AF?Jr(_Rd3estkt|)3{o6 zE^mpovRITI-5lhE7Z|s?#3~GyBr8Ac+Ya`?G=7JMLtsacZoM4B zXCMX4Y8NL}649h~p|1c+g(M9xXxh`bBvuu$gPG3s65oERD%`(f@DV z22A+Rm>pk=VQN`gc=8I3$`i6=r!x%WcSS;z=lxjC9E;eOYl?8t89_2Pf2bJHeAVFzk<>)#m9eW?as__S&fr`47Uof~H~%iwZuw~veVH^+juLA8R8@$eL3sni z6aPoTWo2>YeCO$HB#H6AaH&~vi+%R``TM`~QA0Dhdxi6k)9+k%*rhYI^w&eaSm=4s zpxA|L=LUdEcfNJt9lLCD!nTQ553NCCzHZpaGh8^`*T#LdAnMm_*IO-n)qkR$=_n48 znW+1620Y9O^zZlCY{1ySI(-pS6&qucQEHymd!Fp7yP=}- zZLy5uu6@{ZadpMlKVXlgN?Jgn71WQD($$#S#bXoLTmr_#({jVnItcTwU1G5jcoD;@ z6m6OH4EGZHCQu5)=b2PP31XXbK}e*h6=T!|5**|UM(zBKF@{y7Vvx4|ZD8|3s~3Ju zL~>XNWDd1{kL|N-*%>oU80SK*CdxzTJ$FyXz+<2`0I92)>h*RjnSiMP^~4jCD>m5) z{-~l!0FrFXY05xqK#^L!XaA92x1KO^8<8V5TdM=};v;$NWl|G`HU(tCSmQa5@+lxx z26SmCi;NA4hvbuK;DtR>(81Hwc}Apx{El;i!O|E%_Mi71M*tsdNg^nqgZA~p0DbX%7?Ae2~D z2U?u`N6q1k5Q;{o(tQy66$JhCGC>p#N+|~WFj&`%yx7QdI_%}02HUf8+!bylk}~Vt zoLL`WB3WBi1i_#%$rYg6zN$daqBVX|frid`25W`%W;b~5`nENI1kiEeDT-_R8|_U?QA8lO~!7u%87) zcR1%%9aHG^mc;{~rpUAkv`6j{gN+Z{4Bzn|wnbL)$#U@_T!8btzMY0!Wbm+1J?K)F zs1O#;?0qEthmgyCmWGY(MjT3S_WWnpgPPN4Ce^*tZy{u3=-dFIXn%F2&#uL9?5?Me z^*^0BJE?KZ|}>2vq7FX1?wNjuhWiEmOGT5i;# z+L8dF=0n-AjCyd=UG6S73v;?zeqe3iql|>=(xZ`aLtZ}h@Uq#;DtTt3`|_%7$^Wid z9aWo`_*r!$)ETI)^-NA=V`Y_g-J<}ax2oF8EcKP%-d$}O-MPP-oZm`4`rxjEop3}y zo#-t@S&as=+_I2H&rcZ{IXsVB`Q=x+Y72qB-p+lI?Wn_O8Q+)xsWJRhcIiGz@>YUT zw^fK`KVo>L&i@&7kTiR#p|(Cosz07>?)bse3mz0(Iq1H$@hCmwce%K4S4@cQ0-<3( zUJ6&A_Xl(ZzeZv^K&MS>{^~LJQn`g(ZJeTrh|rZxi-ms7qoQM zyV63grpeHKdQIU>8UnSmPZXbk`dj~hSBd&Y;o&Shk*+lZ#g*)Lc-dQ~;T!=;iZpjO 
z>2hQ-j!#xO+74AV*p}@cA6|$(JbfboXTgB+x^2P<2pEqT z>0pmAa9V8Wa~F3g~NJ&tr2W3yuD@F+59zbJHnV?BjN z3O0Z9y&N2H_bf3yyGG6GSr~ssT_Vr^v+eXhN6!G=X!?XIVAmHNmAak%=x(MRmNP>q zTiJy(bLHHWVXuv3KYU2a^Y#|Kch4OrtYqMgiZEZ!t>UvAPxs*ROk+PYoUQ5Q=9h|v zu4{)zf}z8pm5wr8nvvvOEa3;oXSnv0Uk%y0=?z4=-(nL6%tZ4uPyWj4P_77B3a*Gl z{w?m-lbica+M5};Fqp)dQC6ncXj30ry6tJ}7WG^_szuul78WNOvvCalwOj2gohyr% z>t&O@r_5ZKYZ_i3J;&ZD2#?vwmmS2@XAhs~3w8@T5A#u z2G-Z+3k9y{Z4b~=x%pyWH+zdbWPlf8E0*Im&TwFovhw75``+K|h_=kqGR;oLwp1TQ z=-JbdrUKHvcNVi?@4;)espMFOuL=Gn9eMH-g&d+V3Ajl>hfSQGb)kS3c5&>ID7WeSy@9V(9tE~ExM*_qxDd|Hg`2o?7#)joFAP8F)CpN$AY z_?_tR@^l~Pp=`$jXDb1fKlFPkzRy3vr zQ_)QAtKgzpnox@OTRMSoetyHX4k+ODC`>T``KwSXmNfB;o4IYg}B-hpndw_CCZkM<{2L( zH4j^^MYdQ5*H$x~Nq}HcZa0& zB2gNObAUhMjj%?_qFjoaFDrmauU}sFl%3#hef+r%;}5@3@ncajRzD~X?3m7k+OwaUG_HY(lEOO>(CROLwdMa_GiDS%iW?*_vmkRx}S6~ci<{PRV9+- zzx#|T0kcaVzO-=A(v5t-jD*rn}YjI zSu_>%qN8lKvJ_|E64%!GDR=8S03`b+8&;UABrqHO>jJ9<2>SQ)1p0!9cZWt&;5Btx zJ+Xfz=EY2PR&`B7{Uop%t>faQVPs)&?9;?bsq5>h;=hVPza ze%|Jrlix9cRsm`hn`g64Eb6wVygs~i<@u;jhke2`5P$3U`MbY#Bc=LQU04j*3H~lA z7nuJ3uCH|eMl}-#4Q_)TvCBVCo%|jkZh3ljj!b0{fcs|~*@MjDMWlyaHKbOU^ z*FRiBoN3(Fs!iUWR5W>~(@8dA}0AA9aLMv4zBMij5$woPJpJTWG_LUGcY zBOgPAI%f9Pe6-l zduW+^m)58n3IJ)dhMyIdQAU3o0hmz{!?#Y`8hpuP6-W}NTGW!l*L8kWtLU=yW9leR ztIZIRiAB#Z<+pmA9de4%@Y zj@x!0K!&&%qda`s-m?~_Uk@+zLn8^v!M@9c@@q+92f^34!6g}(XDCA&()$XW z7BVW-*zPSDPyUc-Q3iKiiUNG6haMEU6x?s+T9%>j;Nl9a!M}KvGeYCFBn^6B3=D{EY}u$#2rGH#2pOd-uku#>Ts`B;WR#_ z#reUIClIP=3Cp~w5O&}vY6SCUy$-%jf7dqSWaN&r$3suo)D@lJyD{d-oG_VOW1W2?1SPDK_z$gjJ%wKW~6aV@+YU9!hcPHvkYknKZ{` z47;TNla$Mao!hjf;(RNB>X2fIWq$8`-NR<^-e?f;9IvVRrVTX;5SL})Y@25!2b3ht zVQtN%%sEd>IA+^q_uo2Q^^9wu$0`pN(v&V_W!87j8~jnB$GJIa%+ZlyGo6x(hqNY z+Jq_oM4v<+#>MF4=`5XGka#iitb_NJfFh7O+eqAE2TxAQH9;mFKO{~CP*Cs=(UW%JY|-k##(X?Q2l71SwK-8gTsV5E6cqC3yyD( zHSuFo0B4ius#ZTRhATRVUvAVk@?d#fm}`v!3Kbb>GiIV{l_%^XlbZdtyh{!hJkCFT zA9^J+_w&5qZc0(#bi{ehA8&OE;Q$$K#Tg+aHK0{DSbz`&_v%MWAC|^JQhH4lS!(wn z77Qz2a`?=tIGbHe?Tr`rQQQu^Z3;a|m(@rK(5Z6-+qB_~X|32wx%JA}uy%O*s2D=% 
zj==A>|NX_0!&t-|YoK8T%L{arw)=G!BCU!QN^V_#xQ;7Y%*>|IqE{}jE0kbfIl+qZYN}KRt?hdVSdk@-Cyy-eziMP3`tZR5^xi35LDWDS zB3Y(v64wkq7|da_>*xjwG2gc$?3hu)Q`-O*&5AFAq*e*KlN1f#z&LjLmO4EEmWk%c zP`&hXNrJ{0`dChw3(<;_$_`tC@zI@_xjO{$k{; zeyjwa?=^@y%jR41WNe^Sl6+}-qLOga2LjlgkHYJ$=waS~jAwCp`|Q;RYzqv6L~c5O~)R1d++g1Bwfk&e;MbQi~30G0il+npk-+Ce09{{*#Scqmqmu^w(qs9of1`o~2 zkAI^hL!Qkue8I$Y;>~^EBhTb-;5ug$SCasp2!^v7>}1ZGa2s3B7Q-c=A4S?0LXTK6`Ra!zEQhiEROJ-7OoP^VClq$xK9_b!PfA&QZburVD! z<_8g9nhToG?0QEFq-`6e&*%*;Nw)-0sPUW}MaLtn^%h_Dlv~l*7od2nJ1tkIm&@1= z;;dy66N--jRhTo9xmnkyvNDIvRk!-620W31Wx=y13-bbl3muE>F6a3FjEl!j=dS9i z9H)n{vLyr3NThnB+Y_S+$W>Wwvq7v!Vv}vq%%LCVwy+=j?$g(Xsv*p8G?Bevi>BYI zBackhG{1tMn1wqp8YOlO00@wV9qcwi1&I7)mI8XOCeB1;8fTTOy8V7-#kmVv{h7OY zC^x_@Os%8^jyms~O8Sg^d^A?o%z8>^@>&zQ)_eahrR;J^L1AXibi&xx3I1t;pPu73 zlLaqp##MrqSZqIA<27kfsB>-)F(3g3gh9OE$u{iMp_r4Wt5S>)b?b->eqNxbZ(EhH zm!Ay90t;7_HaBQPR9)qZM*Vx=iYF^d|63=&xEow8cbrO>4Y1vqTOWpP{%_sC%6MX~ zn_%@_$(@O1YWtHi>J!v=seGccrTHn8I{w9ki%w+M%`;yweN{0RU+_&-*KOqB*hKH? zNV@YN#WcOy4Y-+f^9>a9`$b0tHb#r48osQyCaDgIAwiUGlS&zoPuLI>|1!q!vzF@m zR;3#i*+TqLx4C&fWDk353fuK@!=S<)SCOEH#)17U#;cv-)I&-I&AF9f{E98*GNG%T z*!#*wN?=2#Mo+xuSE$c-eu)?oFhC2QNyBc%KO!A;D_Y9h1H$OCKFe(Yfe7Z)nYg$< z`>?!r6A0Y4m(t75?}a?7YOoGS9JWi9?D5U~s+k3roh&Y$|36c(YXgqGwCkNxg(cfH zS*8~l9?F&}krxj?vB7@(!;VDP>^(_x)F|43x|pbF0CJ-#FK#95E)90Sh@43viRQbl zp$u_8>ehlygl4L+`Q5g@UvPJ<>7+ulk(8-dXEVQr&p14yUjz}~qZB-;6J70poT-7p zy97)QM2mm82|%p^523<}{MAMEsYlbbHkBVP<%_GyGmqn?Ksv3E)VyopaV#l_zqQxT zkj@(+o$QnOv33`g`L=}Y5&q+nrlUGE9e}eH++*fWHOc8YTi-qFh>o0YIjSXUhXZd! 
zYi$V?@3Y_5n$VBdqNW&q9Ns+s({;!hjWNciy(HZqjeYqIDwMQ*SUyN8^GB+uKpQPk zW!re{LwQ4JL}1{G!{U%?C^F?}On@(O>E>ZqGC9j>K1}l4Nb1bfE8C70BN);N;M{IJ zJN7jmBEA&z{$fU!1WI;9(Ffzuq~*Xe8tO1yt+J)(iQ&r4Ea@2bbZ~qAB0z+!q5n>( zi4*mRGqhbjCjKkP5V5x_+; z))YGTfQ`y0SY6Mr#$DlpcxSnN_!qrh18T5yup-WDj_enmd$5E^8H7O@N}MehnRDsCHKx9cthey&IHNlLS~-Zc>VlZV7b^TSCcj0`|vuue>i9?9uwemj_Uk9 zM#QL_X7vCcliO`+_t>yTHIA1Ttzvk1q56-YmvqLJvqie~I)B7ruTe3LbsjI)Fs%Vn zKLPjqn#SYMzb;ezt`EzTJD!`-e}&D)`)RlyeM-Mle#wy8q;q;#OjMTpqw^?mGxIwK zhrak%_s7y3|D(unsF*cRfgdYnlzyLakoH&!&QbT)3`C5sPoV3$gR4PT&W0oBrna9lJ{cJ1%h)(2i4^dXI&P z;AU-c(Kb&qlHm?mhY4g6ckSdPf*$q&wY&66A9ZKKiSOp~w?eze{_xpq>wO%Dm~sY) zBFan}?}3o2+jfm)cf@cZ3aTDJDxXo6(NfqvLq+oD6G7i!;-1)bJKKn}(81HbVfe>~ z+Nwtj1ns8Cg~pmuxxV~30;000Me`Bwbgn@4C|tIvEvBryh&*_0cgbZ3nu~&N @p z3Aui{gd9Txt#cbeP0Ybn2`ejp|5k7adfaPZrOlmf z>WCI;gTx7Jo|&&9CtqFxE~?f1`QN%^u5aF6T7wLmEf(v5=AU>^ouYRWjjGKg&=6-# zX^NQDLX}>X+JGcLwcW}t@=))Pt*wG)Oc$8jYqVC^d$WlYshHOl|4aQ!*#{_Y09dc! zbu?~-4IPud<$*|N;=l1XnzrGtSc}asClp^@7C}5c1OGFY<9}31U5+u>wrcaUKT`+rftFW$N-{{?~H^L0W4xzlhW|x_7_xmgxD_4)dt%c{c4d0 zv9rD<^his^>RdPo7{`S==s>I+KwPkbJqdM^r1oG#q_S*}Z+7X&^6OuL@sSV*H92Qr zRUR;9U22_bhi$Sai0NOA#&1GXEeKHy1LkV4L6esmSoHF~nDvlm$e4VNHM7o+XGtBLHgj zfu>C{i~s?|@Y}mvP0C{M{?MR)MD0p#eG+sFqog28dCPOkBVma_L~<2v;VwM0$X5#J z^?oGI?-s_3Rw3HFM8xxo4+}gJ-U8BPjj@}vwu=}nStOV(`d|4pUjh|=)cyZffP>@i zAE8eT`q4a9G=TrB<&e4l5hPEMV$vpOP^QDhb5Ql;MPEXj#zZ zm?6WH#O7msI~q)~Av&v)Yj&Uv`O#92dRlCH%902DNW@mK>WvBB_|-KjoxP%APJLsC zB1-81q;@?yO$0hG^nLTDhUSV7_GH!BoC5whW%)=1_j5>r*$Dsb6>o;bvrzI`6lE%W zHC1BGK|MrlQ@BjBch6Z4?^{|e%tr^WmY^hx?m{S9=OQx~#vfpWxc|Y>lr2Fy17Nr> zLcm%LmpIAK%r6fqoR&u|S(%W|pI7Gj-?}d+NdN`G-B8o)S8^4-gR2o49O?T6GkkM^ z*C~J;6V>>t&+)c};0Govz-QwN%+8j$Y`mR>-9Z$ki?Wk4hN- z#|;1UP{tI352wT5Y8a56xu1?-NR`WeAG30 zZX1q+^c)=a9-3{}ZAl~MRvdroK*@;P8lRjOPf!TS<{f@#@tXorLzl3{A8Micngwd~ z$nT59jy~e5G4%E(B5Qxnpvbbo6O~=`nqEeeftP2o`o)l~k{EY@slr$gIIrRV7RR;wkx_cxsS=7*UF zFV{S5-k<-`{o}2H6^Fi>ZKr0Qnk=y)>B|qm-o(T7rvX3A5qyu;aQcz|b9+Z|-21Vl 
z3zPw1hAub!Ss1jk#T*%+xv>E}eeA>i7=1(s3+l&uvG+=p0t!T*Krb5p+YAX=F3Zp? zl2nHNH@o~VE+phAQGnQfI={e^w0~FD2ME*&rRNS$EM1tOPy{fJLtr%XF=9!`wZT7{(8MykVF%c<59g2 zyC``hDh6pQaCsVDn4(nI&rdG%%s5C{wR4ag&r~(y;|<#AD!77!^JlA;`2Yl216kQ@ zwXu_TsBUI-o$y?=ghTnDF#KL==s|i~H0yllcMdZRVz5}WFkBVokq!Qy;&`xsU|~!6 zuge=}gCBq4F%5DFW64_cIb!(cDc^$Pca~AlP8N1CZf$@gburfS1Z$1O>BjS)K}|um zd#eJO0m&hzt#p`55_q3B{(gAY6O?%HKrzhs>nX#C4 z5G~F+@O%M#aD^hioUl}ZYxFBpf#3w*D9g4;@)5Sd?xWPq`=rhE2&h8zV?lm23a^yc zc1wLKjRR{Pyk$C-0>W<8sjU{Cs5o~-bF(rF?0~dIlwzejcrQ|DZ*t}RLFOU+;EM&>T}U?POeKWFTOkyctJ|UUzLHEA;<@1%3QP`-v8W1&UPkp$-!UUr&KAN zus<9FLNx~?^zP@9;cxAZf#TLUSmtuslFu0Q*h&&Y22}DLqNZT3hATXsvQ}@MH`xyWhxYGV9BugXtt^LQ`A--Z`(1a@pd0TbPL>4Pw4b zV`V}K*Sz&q;QvgXPfHsy5E+EWD)c}qNdRqD2kE!@Iu$(0+M^PmsT?TdPDc`APT1A4 zl^6f@2m3ADn`TmUD5E4E%B*zjv9;hAQaVCKN6DpsZPW?Yu~l0?D2wJ2ATdgj0I5CL zd~a$SX?RcPGM8rmav_*$7Z2FyCd=)Q&mtv$G+p3$-)$RKno~k#grj-#N}U0lCgK(Z zws?df4;#go$jQN;ppu=rR1?37aGE@~-*Bk6vx%}8TFl~EvVMr0hWVi(==Q$h5yuEK zi{r}A+FFqG^AmUe^6Ki1MUSa4iFg{xpoNd53HZbHwhe|(r|T+A!xPcJHq~Qm8FpNP zw9Rkx`?TKO?l08-9C>6mhi#H_aY)^**oUVpxtpFR0!_|?NqJ?VyDA)gmwdL*G=!ZL z{eZ^~klZ3}Ed_{6JUhY{BKBSp(oWv7TU_N zk-hJGJ~L&Vx2eNrBW-8LS4&C;lt-F;MDUF+EfWWgt{;VYlzAz++n>uAEfFKtV-Yos zLii#c+v)?9naI?*g8*K|sx+hC9{uRl(PY+KT(m4o#7z-SyryF@kGLY;{)1;rZx3esoURXzbUS}j87&^BW6o+AI&4nMR~Vg$ zaw0>sOdy}jtJ|3#XsR}@r+WOeE}hejC{#HYbUDan#5b0*({GK7=YHx|gJXgkw~w`w z{n9ofk3;gRRi$aXyCW$O2&?XVmawwAW<4Q8Gg-ssZLY~n0lovpQqOvtI~X@Sw#B4> zo)T<#K2Os>pXFOZPIZ;+a*Io=z%KNw4B_AK7)77g#0;152CLcs8IPTUCOxiRQ=HrW zXOr`9D(SBns20IjC8om1c&PD(a%c^j5)czMDbg*GAx6b3E*1SeGBu14s!OWs#=RUV!Cr(k?-mmdd{;&ZI##aGA!&_X_MvYyGLWq z*VC`X!a#Dae6=jg>yNYBogKyz92UF*)DXrz`u@cIYuwkx=kX|r3@chO=bSJvKA}lr zD_@o*RMKYe2xIC6Tb@4pc>DGc@0mi1p!DWf_M5-VQo8B?&}2b6Q1P00D=X5+@EpEt zj}BKprdFeiKex8Z`(%m#D?O(Ak7wY(Mh=O~0D{&&#QL1`Ig+s7yHG zb?oibrTr~4635;o|A%c(8a^dcFW%*MQ0k&X#RJ0;G}L!Vv`Xexe-J znr92fr^L>3EEL7=i8+-;*EMV%?rM7wmF0pU{F$Y%^@XkzKsy(wC#775Affw4ioe0@ zSZ|LVt#`Br8iw{Yk%xyF;hoeY619I)|2e<=bfz|iyurQ)x6UqV 
z4lW=x@w)7F72B4hk|Ar;5c}KyI$G@p@UId5rSQ?nZj2Chd~7~y&29gpBdwS9YhpLz z%L1T!zww`i+YW;D4oyO5qX0TlN*LwBD19u5M@@>jIbB;yIo#sf2HcqOpp@N!Uc4aL z0Aq>$Wz{O$f&ty{ivLH|+s8A#|8L+ros){_PEpwDs8q<22$`KuCz7(Ga(}ZVNs`>v z7<<TD_&t8V|0=Dv_xruq^ZC54 z>v>(cUvg$5Tn9`+VCdGWj>{F0^OLq9S?yc<>kUCKrK3w!Ms97JWyDaU@saMlIdjT_ z%c`;hj!>&xnU9)^lwgm$U9HuPfGJ8=?8rEIAEg@e)dECk{ar0S`l!0rgA>vzns#KN2R9UOyP8E%$R^|^y z?jy99i-DX=4UW#EL{8jS?CSTg&pLiOxFGuls1CWHgN#ys00mq#TR&r7$qI>)V+oFl z68g}8E#*lal9+|#$Z#IAow())Hb3)Lj+FUl0b9Yf&jG=DT{7+px`lxsR&CC$xlzUs zt-YT~jf-Y7Nn_9#AWZ~b;Ov!$QvF#rs-}@1k$ui~eHnL^aLHffymr_|wg%3yRwQ0|F$UHp+9?n*}oP3k8$ z@9=l;bZ(2X+3^qcce3**9(W62CCz19je%(!RLyi={H|o;$*gIXTc2X0G(uhU3_Gv7 z$(*5i(yEs+P%Arw#cpqGF$VHnQQV1feZd{yz|$>$AxfH<_ee|0i~aYHz3igS(3~w# z7inhGh|OlMka|z=hbQVxB+8LZ*S)_At0q_DT9H4YsjbVR4EO#U5k?rhYGucPgf-8S zw-j8#V`tfxvT_7LU0am=wQ+FElj2~@S|wfe8s%{MC@vq0gW1FKc7#p_B&nzf^Rt%9 zqLMqV=9pwz+iEb!)wu|F2|h5sp&eML2PD01uPPsEJnmLvSPk5j$#bw?esX!UAut_g zYTEtt>1~efwoL>^bBUrz`#jQX|9*S07OD~SIveeMvEdjF$Tqge;{L9!YtTMv6HckI z?1^5L2i5Y(?MA;XQ{?O8jMt`E1@Krla`8-~rSHGBU24F}azTN}y*lBB{Js4#j>l>S zK7$^ze+uj~m=!}e`~%HAaM_D=&t}EBXc*lL9i$JguUG7Kd~2DG``-E7RYxOF!8fwyg5_tUZV}Z9WHFdC2t8BJauV}=3RQ$EfhzfQ}eTa14^u}{R(^?i(okTy* zQ=C^^%5i#g(8SJ2!fwx4K&Ri)nptZ-#+&;Cb`P$OI>QtiuXivbtWNFuaOE~roX>@c{U16;r^tD74Ew2f%{|@tJKvS|JWBJs z_XUTw5-97wkVphf1LxMZk>)$q=`eTW48j|1fXgwBhwo{T)o;K0W|;4HuzNM2Q17E; zTBIni!S(xTC$FyMv1k(yp3GfRv;4LW3&hqU*Oy$ZYM=4Qg}k{fn=uiHBH8IFoYUv9 zJG!d$lJ!qNn%V)C2h;@*s#Q;U@9L1fxo|MVjd>vsmFNQ)+Ek8jxL2jdnVfvjAz1och%f1c|7u5{oD&N$2W)OP1xwqE1se%bDH;>4-#f1l0!JoUxt z)b{P$olXTHN!`MuMKrbq{PdI~0rBP2fmca~BE*Wj8ifD&>M8>p6jO%C_d6ISu8Gi? 
zYbSSzpfg1M5Q@ZU27@Kbud_-WM(zHcOVe_g<|_U&-kCir_M>b6V0fx%awh;SI$dm} zhS0&W?Rw#VzjvD!2U~BtUj0&bossk`A333Tw^iKidP0H0!lNa8Cvo!+TuaD#`#;e_ zPR8FO;MD}#3LC_1u(O24XL2q0J*0BWTY@t=#!DbV2Ch2!QS8T1RNna7&Rd8N z9T#}GM**~U)NuI8P!sla=Lt?1XOylTN!r2-NV0Z)7PAhxLvI<-FnQ_C1oTn@TfbCxe2eFr=sf>=9Mt*k6Nhm?l9oqQ61|Fz=F90vfV1F z4AJc}gEi-0k&@ppbpX3~y1EfWchCquGnry93=!a~qE_aph?%Jo)aA1JaXYPw~n1PU1n{vZ{#v+uXe zzC?1hWaUVIRC7Rvufhe&(O?H4L~b`3)Jf6D%EEcaT#c^ z$DUY)&QCnE05_Veu4$sGExA0r6Zoc$*!cH35eq*S0CQAEQU%*uC0^#cVt(h(;KwNz zlu1+2Bk1lFIh)0QI+iiSH!iTN&HfBcw7_*_J#Gn#5F$Z zu(MlYEK)ZTTH!7!6|GB?E5`(O4`$e zi3Xi8v@6TLu`CwRCfQiThCyAfMuE0YQ&xpi$B?kpGyK()0Qe>n#au5%-kt4E_J?gx z%VMptda@U6Eu>vL9!muQ6fF0l$VL^FCrB zvw+JnsXj&P9tAML?sKiTy^uN?+rUqWW>p6v?la!j53SL!?+G7;Ii+%FBjjl$=M>Au zpaZ5+SA4uQ>vFi)tAv1uZT_zB+I-7DkZzj~zmtGu82{mPr_{-7(|_k*7u=dyIKB^K zE*sC_TUTDGATIt!DPlcUKx%23lgB3k7ebVEIRCwNTl|t!Fh4L+QD;=y*2NRQ ztc>8;0*Z)DZi38xO|P4cHXS=PA-U5;by$mMaF2bCRu2OA=H9CI_J7ap9mSERLBk8q z?7l~+qgRjQU?fdca2TCvG>5n@@Vb!+-%G-j3L^@p=%XJu+UyzfpZ5W3$Uw1 ztKMcj1Yh93fuj} z8x_BN6+Ka}3QWhto9jd4M$Zh`u9r+oVP_Og5;bOj^E0e^xB7*wx75()`Dp%?;YN?% zf;4=?_{ZDu(}Tpv-F&NCCDmQwZ0Ia$tBlHUs!g+FHnBtwFnld zHCB9Fsdc?P?;Lhwpb8C!f+dE(yi1N*2nH#HdD>`!c&Bc6;1Hsb@ot{fIE9U zLK4YY7%Bbr$&`Y98^ZzsJDo~{d0~qk!4)IOw)HB;J1?%3Ck{Y9cmjRw2ur@-3v|Uu zig?$HiuL(6*Q|x_!F}(r1&Oo7VYB+mLN^=15n9P{41gf(>Gt@dom&vP1DG z0|s7lL&2A*fmz-aYB>R2gK1(1V5-gQWZt|BvNKDt_LIhOx})uJR*4Ce70>G#4qmt^|*q3hpKS5x0WdK0%`g|$T8zoySS6fmeYWRxG)BW2mJN+L-6}Ma0sw8zV zLyi1{Jg61BMX8-<2qk^blph5OGIA1PrDzpX+mP->dHQRd8FVG_IRR zSHieqxz2G#6o4Ehmz&M2cZKD-3J}nH)|=SN1haP_K#Y0xcwt1F;rTTw&5f6S{55)P zh*|DuvJ4AJs4xN+KQMDFz0w=@6%xu5#lWO*Ft|CcB#O|`nDK#|LSTdb>OINa!uiHXDfVK-+?+4NecNZTF@J}(~JEj1@eg_gj4eaKETqbJ_t23dl})e+;pDX zo(`+ylL6&Rtqr|6FLNS^|7}}22!f!%Q9fAoc-O!_ms_u6W_~-$LQcGiex2&$pm@{6 z60^%CLWNJ`mXXhWZV1O%@cFp%>p_U^Tc+g2fP1E;3^WBWAFrE3j*nx1>^UR8;H!JJ ztzRC7MB!7Rnx8JlihRAJ^$+}~@hRF%Zj@jNiNLqsZw_xn^KHay)+=I_Uj3|J_spoY-Oo zR5*z>v!QhZM*J>1D&F!8(R#h#11#zRTdY7ZF4zSw%tiw~U>U*kaBBZQKYiFzaq7rt 
zV`ST21>(%U^RIC>+x=FAva_&C3`CI56{HxC$Aah>G1j)Wx%;RWMCeXcoICgbVl(WO zzKqTYI~7cOlD}lXhP)@qsT$^H0 z5>=V9MrIX_5tI=W^?pPK9Z&9SJ}t83?V@%>$Cll?d6_~|)}#{829OvXv2nrtQ#rg3 zta`Qp_5)Mh-<;XJ4d96zSx1`p0{5Fa12d4|S8Y?I;0j4_A>v<)JpJ@jnJzD_93KVG zj%Vjb_8L_k`KDNv{rp|&5Vf@UL1hcjO_1R4{F*MH?btLLu0J*`Nc3mQ4j|BT%=i|> zBo8AGJ-*ht|3)qu#<>V~E);j1Km5>czb4f%8jY;_itV^QZHj*?>|Sbx37S5M{y+En zuT*h`SeXxG`FaS3OWq$KK}B0`@#8@6cWy7YkWWfvbutPl)%t=Pq z_Q3PO``F{Cf{a?$ZER4?IbOsu8-wO2ZjFTb?zrHaR)V>xIm@+cQ-TJYHRm{m?j~+` zm7myYz3+^A&so4-7Z^W6C<#J#puQ^wyLGP|FWDAmN~q#JvAPYODHt-t?%}rgBlF|i zpIyGF?R5*0co%N9jVii`zQ;HmNVp<|FbSkL9+$~eu1T%FA#dgY6xM*qfBWgSL#NCj zz8j6H;o11wn;M(Tj15>+*tw_Uvx%6|+*1x;cuqeNE{jr=5J`_stjzA=O%^$|U0$N9jvMb7H)7y- zt1eMp6M}0-gWpXQTkd)8(m~14lviJT;wahIJQ@{!je4Z%*p`}8A0BB7bgZ;f8l$BO zjqRc1-jt~>1PSoqTAVLeR@PNnpF4W{Av+3c`vRqMC^vJW8G*YNp4B~Y;NWw`-<9@C zqOZ^=tuM5>Q#Ud3>e|h9v(UTC&f?(aT<6j^!6eg#-+85;zX;!01dSFyo?n=jM`=Sz z)+Tpi)+c${&td}TGQ!8YgE8=^0&V-?845K7Sp+KAS|9KnuN;3Gx8J0n&1Wf3B&X8G z%WoR)J8sOvT#wh?Dhx+Soblgtc$A(AAaWh$1k75N` zkL_)%UUM1(-6ie4%qI6VC=wFaLlN~y>-)UK1tW#xW71PrB7}zcdnvqB=}^% zn*}-`%J2o2Hd*->+kBr4v_DC4wp0w?!fyyv(Q`nY7Hzx3rq5dT*fehJs_)%YX}1!Z zDFvTQxhyP-i#yRj)0~sR7$6OQ3N$1q>L>`rwTSreRhU|1_6-mF8R4FrYA0l&aztkn z@W-QQqQUVJP;!bqvs)#p?Xq{xxe*5x*VBvB%Io~;Jdmv2j&|?Jul1bhJ;u37OlW&E z?$%&uDlQbu5o>6=xlxkaEVqgV+^5W_^Ga3(l4IT|{<^!h`km`D?!PJ5U~bFElv=01 z8az61NA!Tn0QHM{*>OygD4F$(=UVUcluMy*+NX3DcP3V1Jtlb30=TyAaf42)3;6*q z)bX~XPnQD?qVZ7ZYlmVlFCW_|&CwJ@50v5bIl{y4zJDsM2YATT)-O^K?CR^;{#7c zts`#9GLK_4UiEq#?7P{(Y0f=nPH4+Iwi3GDKY_XD+j>hGPM3`j&&tc^6suw??*8!s z4Ez>UeUcNs%%En8oKV2qx0n``%1yCc`DRKHa8tP>xHiPRgE8VG9|NI06e&z7Z&4ea z-6rz+eQ7oB_sqk)Kb99=eU5h(7GJHoqoAR|donS8ALgTBQ{FY@HLheQ`p$B zbwM258WHT&z)_O<+?JJOtffp|i6@*)`p65pAAdKa8+e!q6z(vbmm)?oM0f$ynYP*4 z$FqZFF~V=u>ZBP`Rql5I;e{LXcKc;Tbx{70s0e*8uKnu3+cySt!oo8>^4qQ+^Aebu z%IUnY&3`6|zoh1m`G^k0PvmtjcQ?#^7+`(<_?kbsSpxKSeGoea4h+8Na-i~j^FlWD6Miv%d8|kx^;v$ zKNJ)Q8T`}4rYR9BB*RznrUISuFl2j$u}Xque0M-laA_^ANO}{oo~NnWd~{B_UD(dN 
zEKG0uRz_B@W5^;&u?d4S2lBi0h+;PBY;<@>O#bV+jP&Ss`f3pS&j5W3Zi?e+?gE{B zfaH9PIXIxwqNaxK^YLQT!FU5oLK7jpe7pa!!4}+{j3G-L65pN z7DnJ`Z|H19c8=4P-)E}&9JhE0E1h}nqi)d7Bn&{qw~DtbiX#=PN!7W8Ny3D`mtV`+ zo@b+Q3gec=*%rC$wnrUckuHCKmHk(E+I$(d8A~B=i8;oC#d~O^KRzIJ!3D1bWkV~W zgb=p)a&rR(8M34&p-&Ty<{B9w!kACYG){4|y$5yDMd*fk~`RI!F)RSrXE zY-z{txU~>-wLV>We$-RuYezj@84Cj4O#CW)M}ydsZ@7Y@U5xX&!!FF!$37_hPQgdo@VQX`TX(-Jz5XM#KXQsq)VFyb`58)<`ASd794C=lXksE? zJL9*{=#zTPWZI#Z>AvNeC5NxQ{bX*`Y2JC+1ObvwOY^4_CQgBa+V&Q73Z`CJrv9ESlv1E2RHvP{R?7`;Y|pXAy$gT`!Za+jX}R2IexJkQ=gv% zRsC`Juq72p9#AJm&Yuy?J@3}-p0ZVbpr5dAmGf_u?p>DdefkpVvCRP8iv(1Os_2*nh++@B z3Hx}+w?bsCwp(6QVm#op&s=zhnpYV(Uo)-<@Gm%}5@xCJ0*+JP5NE~-wt*K`Q!ho# zTIrhPNELo52L1CgSWpRRCGGA%`fJm{1%Cw=)1gLFPM6J4>F(0sK9}StxEmSHBsTJS zDMmx!f7aem%8@1VCfqgy%z{vJ;sU4MJKybk!+EW1I%^r2el>k?W~4jOP;z*3Vm# z)vWxuK2J+LS^{sA8q_n;Kwhd@XXGEO`VY`2$v8U8vXO^K)U_$!7%Y$1O zq`206BjANVC>c5LFgFx)E?sZSb>)Xl=#d+FgM}?__aza^l$mb_j59AUWqP^n+#oz_ zeh{@KslD-R=;06227}^p@-(UD5qpa2Yw#tTOQljVf48+*`2ASuo z5z$78r#EP}u*5JQjH`goGhR6;(&GJep>Px(Ms20%>@Fs3@(fHz1WB&2e@;>6jx>gJ z!j_QapXYow^+!yoc#L=iO%UfzjJdRQ8c?%wAN`)TMxbLKk`fx}{b(Ut{2R zDqf}U`ixTC%lVOreL4VtL<< zwM}`gB}WbzHj+kv@NZe%&)NsMIT3;$4B?aRfY>JJZrhaCiL7b2>pjitzdS_8nC}x# z4=y&>N^zjkg89^_t6b1>7{bIHYIi?6KR$uQq4aWgly_N9;T+X0Rtpas4W7jSjDxU@ zFeUCQ8TWp)W4H z{M2|X)gs;U>ZH(yMbZvNg2?dr56IlWsgVR~xkO!G^8$w1X z2r#vCUa}$R5VDiWe_-%Az^P2a>xm3c{;B=K%SW%X`##d8X@TBJ0H0gD>8?7n3 z-Zed~USJn=DMn{sC4E;ac&cz~EgncBIctT93j!_>se+9MUez2rGP8UQu&SBe*&km0 zlXT$K`Dy-*LQ3|BUjftl@c*}~eOIqo!{_x=uo-3W!}ao|Q-*QYl?ckL4f)GJ1Iaz> z2aT)sUv!!Ca#O}KXtiA)fnj^@`lBC2NYH<;nf3MmeE3h|2o#kAAbyZx`CaL!(gTZd zWC0rBR5MU#`Qf+btS@l_v$Ng%YED?fylzWyD3sS=R3uC)^%6t$536$FlO!S5t(;LN$l02 z)njk@Hc|A_m{FrY)y4C-!tr+lmaTutY0LhcIP-Q`)y7NrgpLKp2RK-AMe08EDxOBl zOJkokJ>f&z(ZU5n!SSf&NW_^{p4$Vao?|#fX@xR`klu?SIwN~}59NxJ%fxTE(^?M}+?2mAuCGbXv#8RZY2%4HaCN#ak%01Gx6z1>X18eXeS@a!7FSCS8>k3DX}Z<$ zU8$j(*sa*jFSF>NwY$E4=_2Yct8fk4nm!l|_w@FF*rhuYs-m`iOzAryo(Y03;31;V zSpSAV|NO6+R=VR8B=K5a=~o-?idFbhlpM{H35ursN=Q@u0ybCHL|OifjZ0#v;`6oF 
zg|@|5wu-cwi35?6f}Ixs9JNLfSVgzQ`CP&Coq$0ER37Qpv2rBv^oCLs-_Zb4$wH7u={-ce-a`YLA zD6cX}QxpTG_9C4;gC2U5T8bPimOjbJ3V!|e;gz?wrqNHoMC&5dkV3QD$K5W2@f}5k z$)TR>hi3hUtj(?u%ue2_D!#lIufHv^HUD8d^G3#?iQ2(q)wDDrW`qJi>l}DtD%Uem z4w{Ls11ZK!tfff^M@y-4^2xO!8}~DwU98;zJz7#cbB?a~4-LfyutiXd{zw!Sv9zA> z-$sA&cBmT#(lOw~46qQO1Sb%H66t4>V}|^10FVx}1fyu5C`I+j&`~+f^|0XvFzJc02mr6s{}v+v$;p)}ydl)}o*!od_u0kZ z5SpJkG;SxI;LFIU6SNw5d-Fw16BYDA#PTxS{}v99_;Pb)@8t^~?okq*9Q)@M z)Dfp@^6dT*?yP`|$tfs$c}gSOezW|Wa=GLZh+b|AwYr)@4VAXgpR56?)|@UlckD{J zjZatrhobqRMUfLOik|8ARds?LGHOBtg;JEFx;IDUi)(RlE@Jqs-H_PI@X=R!7#!=r z>XxtgNSflozoFd@|C(lHc1?;3S&(q(^koZz$W}1C_XFY%2p1w+U-6R#IsDyM#ocW> zEtiFIsagOS)vY3JVy;ky92!T+ENXrnWxCa$!k7OJQTLM(X}gXPajlHHI~pmaC^=n2 zF`($4P04>ybUuQOGOa#zym&%_df7702~wm!NHAZr)cko$ih*?}^JV2wOV7Bb-b$#* z&KjL$0PVWIQ}5T6K056DalA-C;&x8_@#HiDQ)@Amg0Klq4#kBF5TC%REi0c>gLsR& z|MM65E52gurjzsaK!dpTi?Ucitsu=2UheR27c2M0wZzbNrZLkudO0PwHrfq?hu~wy zkbfS=E_d|JH%y)Gug~5LwDcC2#+bH6kAIk`c@!ct{H|p60}5sn`90xwpq~H7S&%L+ zfruAQbvB~roHRLF%9i-y98ZOV1217;-?!h#fSO)My22!Ll}GH}HLZd_KaPhDT#wmY zTQM8D+%ZKM-^O_l{@KBAsfIR}WY7;l0oC(1r%kK(`c2_hb`K z2o{Eb1#vR1XkJbrPe_){T5YSfy~_uiNbCep10X~QfBghyj{JI3675G6lzjb@{&{|n zOnnlL3E+C)zg^xD!oE!rOQPupZ=ig;IpxXNCWJ)swT{!DG68&=*USyJG4~nE2Q0Nb z4!qawQu^2OTUZo}E84Y69ez3qOM(IskH&Y1LK&V84~YZ9J@S`Uw+!bky0;Ndq*(5a z!yB|M9DUbaZ=JejESjwy&K2KQj-xuE22rxjiz}!5n>9r$?S!hGDMzy(>G)BUI6m8} z`%CYdN8bWyII(a_Xq~byJZXx`zaHi2{;<7eyb=J4ZCohYmHgRAwhG6tVeA_7?y&Uv zsTw9a2N1=MeMG6b1|vdT)eu}EVJ+!T;^Lau#27A7+Hf1)JN$fK1(E~mFg~?eiLprh z3AiLYMaX@u|7m^Cx?8cxn6;z$?@E0osIO6<<1HrdjnWBF_jtuBW98ncu9!>X0xMXV z)FpD?1$;Fs#G`4jp~-tRq?pXY&Vz->BlG*L5AR6VeXQ~49FK`u7*no|f8>ko!hy;kx4>4|!G^fRID{eu>L!?#e~^ zESoF_)leghYRfgBI6>jatz0_DiHNGb?c_N-6iI9;GpJOFf0j=*ePq!CA1PlU!uFNs zj5)|51c}4_+b$owB+oua5$nsCO*g40dKbcH#B`lz;0z5kZ&u?b?wQ|}!qvf71ejUB zlofM+7ioSZSaV?6th#?L20^GLb^KI$w!&ut5uU>nDPq9G%sn>00~_$IL@tS#)W)Pl z2S{8SgLZ80UZ)DX1}tRw3Jm>N0E1flLH*f8zzT_kRw5rAZ318H#}^g`GqXQ&<)O0gN<`5Od&AySig!yay0ujBX69M*FW;4v 
zCCe6Ehjg!}`SRETs;7peFMW5Z$q0q)^tMeqGgH?GC6wo~u=m8iPS&vqqa}}5r;CfLS;@LrmA-OaO zyFaTZ>=|~`QGw~Rs2JLM&}(piWPdrmmj#`%NSqd>Km?XzyWM6Pc?*>cpk)<`bCDifDigpAQ7dK>`{RHn?`Ds2w}2%x5h)_` zVNjc-pRz@>J7;rLc+PQu>w$^JlNqu^k*mEnHUw@V1E)QK%6oHX2U$U)ra*-r~1@Xwct&g>c(prUKZzdKA zd==o7W5?1xbZc=X0VS`ri}|U>AjF3P~6f`p=8u(6PlgqLJ8Z7J)^s$elC&9eBXUv2+@ zJ%7Dy=N)HL51tJ=*NNeN;jwWsq^!7i7G8oVhEge_jEgiu>~81A=FF47G?o3#mrULf zP!ZjOWc>`lm))=J7-=rcu(^{Ps=(a}O{wF~DZ`IgcD0ZK<@t>lShi>_h>~uCoI_a{&hU=`p-42QNq~%$1e)OsfMuBzGczElB{ZW#G)h*A1GQn zTymQpLo20(fk;|G1NJa`MfrZCP9`W(s?17YLA?oegnDb}wxaX^v%KC%BckJS`2wlb zA;wX|GtKN%tl#A48|W~L8`BL5fp96GP8-z&1_tT5CHJ(Cd^jjq*!YM5k&cUTr6_h( z99uqlX9hQR2}#v2Lr$rSIiu*fPU`F7_@ont<#(0OV(wZ+u>1MQ=876@Qk=9)EaFLwx$ zR;-?21JyRZ@ixV5d1qYn9*2^V8F8|AcJ4SZz|E0j1|OH+JnL}yH-sE(IsuGWcZqEd zFl1NNwdMmHhFn09K%Je&Gmxv>zu*za8Hj=>BXysjAG8~#z$2;AfEETe#34QL?j!ce;{ax=?T#Qe{u1_L!#FWLiUdJXBnLfqU*i_d$kmF^RZB-Ue$iZ+A{agbmK#xet#1maAnjjINE2Cf9IOS*p>%ND^W2q&erI( zm@}Dx11;p?X8ijHW%pS+IBqhOy6}ONEL)R@ZfZOcTVa0yVw%X;Ro zwoz;ys*arc_a-cl;c_ACmcrR#O)@blvhebErQz}-v!?7`0U2B`F@3i`+N)M4KJT9n z-%!|mMth?H?YX!6Xuwy}=begfDVa7wMEVdEC4=zK<^Nta6+;9nT^wA}9va7?=qr;# zW}!6inrgmOAO}&lvEb{jNca$zb2xBUb0;_^3c-^3l7P3hP0Cl0C$+uO?^326I&Z0& zP?cdgmY3osqpQqMK89LZ1k3*>G&q@nvg?oF?9u!k|1}n;s~B&xD9KF52Xt%dX>y-Z z&+-X3p-%EKW{*ly=H)}v_2aJ6Md>ea>%;yYCX&uwr34ZySP_sgXS3V9ac|W9$da!EKSqa61U*SN`sBVYMD zQwd|bdD?uTK)wmP$Kn0W-#%NA*FL?H>E)Y0iBkR)!H$!E$Kqq5+~Nv6NP#& z$+Gs|g|>+~=ekhSJol~p=DX6{HEg5FO?FOSs<~G>_he_h7p}q*olx$gr=I_y)Y>+4 zEjSaP?n@#`xgq;2EAS>-?Qu3^>JX(VV&|^@h5~|m#wS!q z&GCR@EdK$(IK4;)s0FtYgCC5MwXyskxjT8^7t!JU9|PILHLkf-m3ad&pJx(`jo%?p z_BN_BPgh9;Us=w^CNdFfdBP4qG}XDkY%2Xq3$74UG0--o_SKy`6j+K{O-yF%(SaxX zlxv95mnT2@cd@ktp3;}Tg~bMa{I2vMtzA2Ck|6tY{%T-S*SK@Mq->i;+K(M?@ZXi1 z*kzy$^6vH6kT;%b@{mqA%dx~Qj0eFtg>E4BC>l#X8As85zADi_zm)PEj8-i{-<6D_ zS+${~H2g#+X>T8+B1Lp&J*!AN79p8Pz-$zHo^!i8#Lnd5x+_-MYrg;kWV?T4h)Mhg zri~^~f6c$_%uoEIt(VK|3wA1O(_3f2mOc>S2Py7htL0wH=wglJ(&W~`7U%Xa7jN~5 zf@ux6g_UpN^tR}qty~D|DiT#`fQ%QAbjwg|Z1IO`Xw&eAU~>yO1S1u) 
zbsDZV^*QW*Pc+G4X;PH7Jzb>gpxJ$mi5)vl18>uI;F^!D3l)Q4Rzg06t7ogRo!`U; zNOvCvlTM8R7v`av2RP9Qh6EXI%2X-Fc|X{febko9{|x z%1Sw*w0h+aRk?Tme*Wb^+B8MSgbH)S6F$+&=tak;?a>_{{#Ly@wT#J8m0)z^#DiC# zGE@-inYnlC>hEe(QygO*V<|=QAn{y;UT4h7=h9t>2H;rN&KxhYQ^oH*J<U(H zmT|n7*?s1xJE{SqM*idQvxnW~#QUF*RHrfDHj-45?zA!xBjgdl5_*ptXnghc`AAu+ z?L!@9d4+WaUUgI5e~w^!6c#6k~-{-OB%hG-cUE)mwapsEclvTL=@TTAlkQa zx18G4NQW)+Kv>8M=`>DMiBr^?Igb07h#&&*zkqCsXPUp8Y61RG3hI9AWFfudawwYc zhLOHi1Z+)vjJHk5iCf2>%i^EUxRvx&A7ox19r6XSdC?>7VW(!~&<6#iAbX8wuQ9|T z#WJ#FLLQqjXG7?x8T6%T_9q%rZ64b@iPYOrqsA>(l>0UZ#<)md(`#A%h-{M?kT@hteJU`n;K;tkp8frx10=E~O~a zXz8|l9C2JEJ>_+6e7en?@<9^^NvR%M%OHpL?E0?96F_vn|G#+ZECLe6o&{yCM{OTtTz_&tWJQ1TbY^3Q$*Z)b%;J(WXBI9W!+zV-wCkE; z;zQk|O}YO467&T+u7IY1j1m5y(q5FIYe7u&(vT`A4u+#}KLavDoowv~j$*ZUx z=xn=+=6dh_gRJwFHT|YRicCw27EGcRn!HhdNmL2|;8oFuLK%#Hl)5IcwuspsYaq3= z>o15kU2x61r%l1l(^kTwEL-3x)Hrk`?%$tXe$u*?) z3&cQ~?$}*R91DZ~=b0N*#A}|PXd28lOYSs&E61yxx0wrz5$Vd(sD`Xk>tB8A_cw(Tihm+e(chI!#f$NENjSBu z{goa=t|==zV2e5^xR#4$v(!L6{#o{j37;OUeHfAHUk9{F$)OYr>KQ7}ShltD;)bIQ z#t#f3^0FLzSZ{n3#V|7E|yqA}E7z8}%lUkHC`sjhAofud)o>vCGut6ktlC zPt*U69A8ONk{Pz~)VC~++yBc;4@!QS)ZrV}Wa4e!oE}>S1zw7Y;2j~UEiG;f)!)8! 
z)A^rv@WhtTHYGek7ws?Wv;T^iqn_k_9HB||y%M@S2>Y(S@8aa#VO3aY2}$;Y*bO4e z#oAqI3AiZp=b_gRzc9+#rifZuU6dXK4bachrKu+>-Q2>nvgBJ90PT=`{lFpA$Y4to zZ9Y4e25<=p&;5?N43e+bn~w0o#EKwc^N2{oB>Bb8-R$9MFXYy%u(=s`A-KdK7|O+9 zEAVB~5Z~d9Eu%*{q&7>t6(7^Mavb~<`D^&8q-G6IJK^h*=-CBsz!kyD8Q_vV%$E^= z;4Ofl{pXDEa?+vv!y$VgCP&4L8uYpxC_0*4A4B`?zX|^cUT#N|WcjaaIknyOL=RY# zRnmaTD#0W?UUZ;j?!Q%8pQHms`n2Sp&ws137yct~Pf65C0yF0y_H9Qma`(=Gl0|;#Ad@o3@TCQ9c z{*Z7BFD{RCZ2cjy#)CKK+{#v>7=b`^D<^{PAe@B~4~S z)SgeErLxJKf^q^z3f(zRknLzV)Mm}cA6Je1soCy$8Fw$ruw??)eSFao(Cba8?&In1|D z^0dE9Re~OX&jLLTD|*#^{dtNZTRYlMCY-&c?&PDk)2K9&5{ujx-+^Uqj2=d9X+T@` z`zB#W7G0#n6N|I1U#hx{U>|EVQC^!ZsATA4gWvWxg*%}=6|A!Ndw9P{I145APqKh3 zH2oCnT;hi%p+WekB~#Ta`48BH#gzder-*RvuRs2R)(Y7;Cy8LSEaiw==z=`x_{SAg zH<(vke|xrOR0pmM>jF2t-F4Xi=yV*eVukoHCC#dreY9|y9JQ!etfkY1tA)YSd@@B0 z>|bE=z~b4W{>P{JZIXo|zVKjw9o$J((4@sk-fPSzuHb~1Z`$OKV z$6B_w`L2b2&lISK|7~kHtQ5l46>x>dhxutaN|v;R)zt;Jlp(+cL8Mrx!n%JrL?PM$ zHMQUqa+8pV1T5`^x(u0#0>Z;XM;Ze8;TX^=PtY$Fb)SE}@zl(UA_{yUwy@}RRg#-z zRR^L9gsZPCtwAD(2%Nr%46q<_0DPJ?lw|-;R<&b#r@%3~&o?TNg5L$bx4ifAl!}8P zHb)=}7qMShyPW!#q59z{Qdfye{C}u=`*^1J|9@Pk_sOZ0(}i3dZKtDB6iF9yv7Jt* zRLYX;i!7;xq$RoBUR@lNWT%7@TO}=*qi6^#S4(1Ln22E*MmD>!%U8eWs`uyp{r#ca zt=p|^ujlLadOjbI`~Cj7-yN!jvLLE+I>*mRH={-YVPeO=tot^07DT4li>YahQ}V;c zOU?B11I`==FSt41@l^t2c%3UbmXn&p;Z4SW8s5CfH?AfA#b>R3+Go=~r^bxiOg9CO zA33lRS(~KNz^;cpPZc#u3yE`a5RqQtO1r)nfo|94YGe^y{X{te*>r9zW7Hj=xU?=9 z?0|$qm=6Fm%4A1dL3e9uXDDRU+AVzvE=je#c6nb>_B&^Y$ubq7bFi}*`q)OqG8T|w zI5^5Y$0|#HYdo0aW0P&ke#W*R?}qo>tt_3ze5qdgRqK#Cmdg?wcbX#Tm)jy|efP(y zA?8zUdv&bvRt)8;PTA7_oy!>Z;$2KymWKV$sTJf`@#2n0@lv2tP znji7c}rI=-q?-#$EDgsm#=JlxMdBtcy38Nqqujmti%Q2+Wb zIbm%C95NdQqeYwQhe~C6lF$I070EETd$uue^gU3}L*nmqb;7?HB!tQ_2V!kK_CJplWqo$XnbLn{nNRsXpKS#C317<9p4jRgpZ%du zOET1`jDnlEg6?Z8W5fU1_M6Sc923V1BmetPci*&IdXG!TQsPgFuvUa7^nc33KM|!t z0&GI@qvXD0T^Dbm-GoaR$WYrW4AVNt(H6ZaYCdOgJo{zD;l)DMv)4dNFXs^%5mU3f z63x@#sONQ&^!kf7vhPUF)0KYqZqf32E>g9Rkd3C>a_isaAZXtAL z-$P4DBr=ce!&L=5oT&C7^Sll#XD}E_iFqp7pIt)lo{T~gLB2FI*6VRi2HG~bW=PDV 
zXrnQH1c%VK`M%mm$?J@@OM!dhEc(wkBdXmA#ZD`T!|3pRNve8MxSG84K02$OgXY5M zR$=k9QN3?II#Txw#N*QBbM|)goX*8kB_|2Y=?$-yFhm*^MprK{U1KMUud4%gvaUZiI0S`v z)ie>VLu1c+9u*{SVx`!PhQ1N!hR*lWoopn3u1uHHvi z-Bv_Qm*Y*x9hB?&Espb&sbJCQin}o>m*_kar16M5i0+5<(L}HksB*u**W5uWByy%$(B1>n; zFg@hWi<@ng{Hrk&s%kJiBEE&fnt_i<13-cv12aD%-{L@rl=t@wom0kWd$a7w=5!Dc zT$Kb)YZ%a$eu;&7Z^-?G8Xbb4Qt4R`a{I9UObzG{U{m3gd7I?G=}clPiJrM#B!7yy zWyGnL z*fog9!LgpCSgb##PiXVt|E4PWCk8BqOgZc=X2y1;0qZD0nCVVJKB#v9k9H-q&_Gnw zt->*Uw*L{ka?+(3Lpnim6~m8<8M+S`RIt^!9+9Iw&(&$Z0bFZ-f~gLgfhbrJIaYMa zbaQ+1?UX>KZtMH30ZS1JAYUMYmElw2VF67LQ?|$bm4v;K%C`;iTuh#>}3B}5giRir<@_QlF>Atkpc}G1V#voq?rNL$AYUMyq&kC6z=3X*H1#i3AzzfudTs0wLgIbd z@BxhO`faQ*R;EMPZpJ$_GJf4Ex}epU_2v5P&t;VXE$zGKrsWca=h&sRhrx9SR$I9T zx#o@bHInV+Xat?7r^FY^2n^rnS?!Z6HWxSLD-T!NG;6PhYhpcl^hVuJP%~C=7t{## zroU$XRXL`mJ0HA75(Y};Ibs|$NM7O6&{6y^F#DZC%-@?(KOre{pMhJ$`GE@T8|3l} z*$D4@g6@cH=+qICxzblxLY1C1i7(iEY&!Y*r!Uk{@7$4o$7Yj1OLtt_qSg@GT4K}e z+i$2(0CsDccqjhP+DsSr@l54bdot&z1##J;RIAhjrZy$v$JA-C31j7wl}Fk>z{g@0 z$kA@P6G10>iwyh5=Uh%y@W_B~FS&e+!CG8rL_1zPoM8Ee{8Pjs{;iEc0J^tdV!U*P znx!#HiEYAST>0_GgqXxx(n9*e)AtTt!etG^g*TdpaGp&89=C=oDoVryj3%g@csb1e z=ToPO#(gd@!ouO2zRTLq)|VaG0tt1IAgVF=oS^Z7<0zl(>_`ZefXIb}sd&f1;uGz& zx^`-P6B85Qs;o#>+0}qe>mOg=&S_PCz^!w{A+{m#dOv2~pzx1IUuzZVSl}`Sw^I`V zd4K_~Ze0*nrj5R`s~y?s|4Y*;nF6znG-^Yhd-|DV4!~H9?@QH}0R}tMwx{#`@7X5_ zNUCwf2RL$u^otOYpm+CD+Py=y3_~ezKG8pVrjz}~2z_I1^=_#*bThS5Y@hZ9*bY51 zxq@|%-zGp%hacETkOS9J1`(HWEMjXPpEw5n6pHKCy;bIR;ZULS!mW7AE`SJ|FaD~v zg)Uum5jV4BwCdc^p{(KV`P#e3XTl<%hdL=jp=yp>pKS4d;h~=(N%yr!GoJ}qnCdfX zzegZ)EKPZ{x(Lmr4HI%zp&d0F19m1(!!4U?BBcr-NUlr&I1r-`ZL3K&fQXNftnF|j z)Mz4@xn7CZONrl=@K#p1`m2@+x;Oyp$6=L)3Fyq+M#DO6mW?Z8xNvl3WIaDrJ~v*0 za)WdZ|AIVy6RF0f&9FiCjct3_xy^y<=_ed2n+AFd?7o;Cxc%$4aOY>o;XdG_`9R-w zVexO}>h`6!a21p4cTwRfQip5JA`El0a7&DNizV-M0c|;12?8l1CWD3evwK6_LIa@4 zE)KSqpxquG8e<}F(W!|Pi`B7&^{=o?8ut!Z1U5T3 zj-k*nROf+z@OBY0gwwwali?!E6(0*Flbu#rAHYCDQ*<9-Z4rB~sFi=w?4#7bX?tk% zoIfOpqVEZ6tTuZPzQZ8tL!g7BWR5RO7DZOr)twC@ir{FF!t^G1X>Qc1dvCK4paH-E 
zEoDItHR&H3dBQ~=&Hcjbgi-ymCvn&++3nZms{QY`p4;&DLXE$>WTpTgx;PnNCePKO z@FScTR_f=mQgA`VncQZ6gzB?BXK^pnb2gUq(K@MfU-ruz#<)8p7yaPuK|&St;QP+` zs~^9!Qc@){CUi6ghHg;f)81sA#qHg6eiknWWgnya1${`{x-1>tb#==DrZ%TU`}~54 zX%_7Uidbjv^vR?3eAO|Xpsouwma5-@BjA@xAS6|=)jvbevt`bk-m&S^C)Kxrxu$P_ zF^MJ~C%+ZsSPr>!uAhUuEP5U>=2Nv*f@nquEfSp=m9^yF9qUyXHjTD>nmC%J*tw@8 zG2^O*ieQ2Uhmvm|W_Yzx*wOarI3xuL)0xH!&k5t5;+;drv@1e+iK%&$C-?m&FlGqY zGw8i1I2`{o8ycN?zq5Vgakv6G)u}xDDzv+m^;Y(ng`rp1elp%e^>;+c80-><(1)E* zcHG9_Z$8O%u9|PJoW(x12a6jsAQc8a}M1e0ipxbgQYYYhDA~Z3HXeO51tPv5Jt7t%MPzGuyL?8 zj!@en)FNyrLQhCB3hF5;F|dE2mo-|B?mrKRNUR)AdGd6H*@y-+Q7D%p;MzJLkB& zCR%2uLI=b%3;r@$-f9S<$XTkmGCQo37{26wkqdg$u`qlC)b;aSzpy0`0Gm6nUr<0uj;s_(0o(<8wl&M&AIXj6&Z zQY#{Fg6;4cQNiHDzs!;_{Rk~Xr23qd%yZnM>d~L{3!7roSgcCE1a=>ag>RGX=61bN z1f||%&?m5M_IR;(iwv_YIT4c}T^Xtd)%f$TT3bm$x;ujs&Gwjk5}p-*({ETSwm=h8aFThOh@as`u-Q3It-NpB5k9^sOwc>;X105O~d+wM|9-8C@-Sj~ReEc+_4*)u%Sh zk)m?cIHp`OH`v{XJx@Tsb=v_XQ4fT0MF^d(<6y%gRzi!0&D`=WA=q!39BACYSk84N zjO>1zY9fcVfyoK@q{BYIko2pTxmUB**zEqxf33a#vm#Yy$W@6rG|b|18Rln7vV`-i z-=t59knf2+^Rjd&ay%ojL~F`6iV+`0jeGV@qwkLjO&gUnyF5^tG%p>`?(>EGRZc5H zeV2t9&Cd2cnmQHJ9&pD|PiF}>Kr*$p;Y1yVf9w)m;W$E`c6sFEJ$ZiOCI#lfs6R;n zKrEE= ztLv+%pghDD3=s;yYAuZR#fL5uIxP}V=1hNz{&=m`ej{+~1k%6uE#`Xvyxcg9LklR> zuCfF8Qy`Px*Y|K(VBlZs#5%*?iStba*5{CWZ>p8s!R(HFzAoX~E(QA^RfsyJNUXb< zSrC12Bc+;F2`W@PimkSuZMPZ6^Cd^ocJF4ULH_}hQ=^M?e{Cq*krTb?G%azem-s^W z`3T|IFB3g;_H%IMF!3?{YE|Y!?rhP{1w$98GAGTOQ&hM(0q)WqA78a<1E6tpy+<-_ zUD@1l;;}}9v#0zwGzC7ZVSyPIi_n{3YM2cN#LkVAbJ0t{Y&bE&rz1X6P$sn3I&*2~ zSFJPIoY&E|xMpC6Nx{_9+TG2ZiY|I z&`H^dnh`9xn|A!R9q1I)o{ItBs$ZM<+_S#uaDSO|OQtwcxRiXu-YM-gg1sc3y2i^fp`Iq$ zo2+0p4QJI-LAPIF7fK0iigy**HB^5oty@>xDe0f@K|3t)o3KslnT~jEeC@I}bQcVz zIP!}c8O`lGEl86`D$&l1U5%kI)1 z;Eufb+`U8o_ny?$RfJBq-gR3wccedNz>QN&12>!$2O_vgJ#>H7$-FYrY)Y=_!oFF+ zbCemTM?9X$ytDA!)D(r>9T&iPxN9UervRsyR|HjGHCUs<0PAGA`+bbsdFs0M zXpqjJeh8{IjD+u}cV2wP>|V8vpLMe@G#6f5LLZ4Wi0#7ZZ9LjITy~HMKZbW;yWV9Y z%svhS_a}b19gD?6R16QGfivLd@Zu5`dL^ql;+E|9OA5R-vm_)Gb4}6c^SUO@d&S=A 
zW*m7GOv<;#<&o}(FR9sL@LDqN@Inq0?4*}T%l zE#jbkU2p<0P+bEIROfv&Mg@a@H@=ZOdYWm!tpK#o13|0xntcrn`3vTzqqV~J7A{a?s@N(_-@1Q@Ddqn=DTdz8Wz;0uP>iYOIKf~o~ zi}*dy$w~e>?T)z5NGw0s{p585bQz%HNUm2WW!j|W^CU%C6=4dnCj;66nt-oH=fjas zPM!a;+=fpY0YTC5>bi8=W@-*oK0^XdiSmw7r?j<4RsCBW%M=hmBtqiOg}5e|bmh8& zb@TF=&Qs=`q_hzM)~0~db#BMT0a)-CnC@@ua!$uZB1ZDiS0vIx|APRP{;GBBqFV#2 z*fMO9m|24bgHG*xw%ae{u8O#FscIE!81H z=1%)IOX(CbA|x{?Logxx;}|TWPk`!=ua2HL)?@o|RBB4}*>2`haEQ!zR8kaRp3}qp z!>;<|vb~4$TE}|y{9&NS zE@zTL1uW^@2-o=PB`5v%j77)*zN*3fgIZ9aiG210W0PZ61>o*W^(R?*Q(J-n@KCzT z?v{mNoEq(SQH{5YJKd5ONCY91T$vc{GyUZM9v-)m|9dE~@8Sanu{vXS6z!(wQUj)9 zkwXzG_veE{(-}hQErmq+=N@DwQr7 zdz7{>>0dhlkFl8@w4xz32lF1341PZ*T|MX4p|xeEDL|yOBKv%uK76&VAiA=Mzbv7+ zWf6tM8UL{7++Re1$*}pVLb((i%(CFw7UhcQf&Fr&LtCQ5R_EO#E6Ab&dT!29OYtru zm`IpY+p$GmAa-stIR*+KU;o1(e&UAl@#8_j?2HtuYElHrPSw|Fb7wv#8P>}!R;-Oq zMZL>d7jUOxuNu3pzUkOm*j({s<}vy&!Q8f{KyzhCSC~(~Z6x~r*5R*OZJq1Ypy(k! zN&%01B#%S2DY{w3^?m<#zdb~{Ck9C#6wJ!*-p}lRPMonId}NkfaEK#*xY!iv-?m+v zyS=NGAb-HR3YBO(2(kPGqa*3IP;J-+&Z*kWp-Y4Z(`EO(l-?b+Au?(mXY(fDQhl=O z&e&j+ehX;<#w4{){R*~u8soC@{RQ$)R5sq4?96d-8&$IvC9!}?MyYFKCY)3EC~N5u zo%=yFO%|4rgT5$}ThC=F+P5jSO znRblufiz1{-1zdZFzxy|Y;ScQBOpab_vsdM(S?B(1rB-Hf2s?HDrw(s0H?9N`l{u2 zQf$&7@pT*;5o#O$Np*mG6I(tA`r!9=R0p*46VEoBb4aP-F<;tWz-0Lt*#Fu&-HdUP zzM$)nGP(3uP_|uZf`W(X)c5KWUFRO9ZrH>{SJBFqRshy6lSBR?2j-2TVETyeY>0U) zb;lq9i3t}?$XtwFoxr1KBSw=6=E7YX@>IQ|b;#HOvERt!QmLE(&8om_73i~bx!eOp zM)d*PZhF;Qsp13$6u*H2xK?A#nDtLDJU3Sh*QCaRTS|-_rz8GRA;9P*#rHR_ze&-Qnhd#W!KH064_|7vrdp>)Of&rBNc4Br z)SZW`C9s!@P;0|y*-#IX5L+o zC@`O)%gB5m+b2hwOuxJeBx8i?P-I)udob#Q24f>~e%3ukh9CKS9`GdW4geP*)(|gN zU+nKg-C(GQry~1MjGe|}te|{u9b^yav$pV%JD=nq)S2?zDM2*egqKi`3J(vb(C8E{ zDYUzRH5Z56o}52z77=9^X5p$UZ-$r?id)1T;n+^-*6|4f=6;`$LE{3cUr5g#Y(JMG zp8~dIr1UA5%>R8cg3)q;)oRdnCOj3G}H>bfeZS*i3c+#WOFE+c3DyzYk zSS6BYta8b8FrpO1|B2MCY1^sPKCHCod;D2PKxM^AhyZU>K9(vj@M)}bzb%LGL=jL(#*U7GsUYTGDS-lvLdkTa z(Gz{`I1Lk^vG)=uUW{tUAUjDz8Q- zog0Keyz2Jh;oPI{4$`2V5xu)2n(hL6YQ!>&yAzFtk_i;^sw5 
zO0Cilx?cH+F$D01a~eiKS0MY=J-G3Ps{#X}y7ZM!L#?geyOLBwmWrqCP&KhF`l-_1 z0Z*Z_6Rz0P(kgIyrFy_wK5O5n9jFnDrx zoI>eN{FbFieZqG9D7hK!1Gj%}qp^Sj!YUh}km5Jhvfz~;534hGqD3<}mP56I%o0)8 zKlSyoZJ1L-)IDbLo>%BE8b3xO?Skh3UqZQ|z8gzqtAhVGDs>D$K#U4BO}F=VBQ=-7 z?;IS#{TO}c9%y+Q#%~t~(RAF+1nwKlyQp#FY{y9a=H9W$Y?Jp%AYIXMEcPNbAO1BM zbtyjtt?SHDM_hGCToS#Cn5TX^Yv5=q6oXC8!F*=s>cD?O|4yDf7tI3~@30i@7o5~^ z!m2}2j=psa@22W&1Piz@ZN#P|YRde?mbTbB2Fo8&nmP6H^d<%F1CGHX)vzD!39Z|7 zuyUFKRz~@e(%Ybyy@g1jVt5iH`+=TvE(z8fMIlEkRnF_F(qdzgx^Np1xM^hYA?Ws^ zz8(4Z0?q1?l`l6?gY-Y0Lr21e8h0(ZWaKxJXHGkAnhl0T8F`2Q-#dO7TYA3TDdF90 zwb$9zmH`=>w(|TdyeA*48Rj}pxt82`&^;dm#~VK1t1F)YlV@#?KNM8Oy5^t2Y)nLczdDt%0X=Fx{(6pCKns1gQSfk_g0o0%fD<9uO>*C752 z=9Dt+vDX9!Hl~E@%XnacHUU*~u%mvulI%|gn`20rUYFy1BgnAzdb3-j`2-kcb87Vp zpB_sLm*^W448@>Sp<>Nc2h^GI$YHz1eK(!23t0jIo4v~GZAdg_6ws*B0_c=fjaC{luUGC|cD?xo#ZeDD z0_VVQvG=nGGhpR0oepYiuOdgK;+H~bY`A~d!NY*LB51*;4zak@YguBl84?3NHR11z zm8U>F2G9F_e!p zJ5s~ayW!S)c5CQxDZQ8M4ytF@r^HnOqx*{1B&(%#54uX9@;UsBkKc|}1V^z1d9W%X zuVJI%jD!ic-StmXy=&;NWQvPi1x_r@FCJZgw{cYFLukE|DOhUzDdP{9 zxwnD;H!$j9|BXHG>@|OG<YXJW>WP&Txo;sDMPd^h`Sb^tJk zZ?BBnIIOe8kG8#D9jA?j18|NW+P3evDmdN^?&>*-41qOm$U(MBr&TfsdGi8bc-5UR zVC^?oJ}cXTjxtv2C>83^{*YTu)CWkKcAv^N5-c@Ow2ApTe39^O=!Ns={wnlr@b$AC zCKk3vbFQ0Q&$I23KAU+37by{jc`_sOJ#gV?xRPfdH^(K(#lLMfXi`$mqx|VSk+>ww zZXU@&_ZcWZ!SOMy=Exc7XS`W-LO?xgxq!OpGGRF!ZI^Ik@P zFJF6I1m$MQ+QqoQnpmmYzHzH{FNpe#1p|X!|aLRxLD2%)(7>#l4-T_lrpO%Niru#tce&3J>c3h`{uzzF5(`b)P?{4 ze93>2AT{S<%eIe=I&9RZrUF+}a0_CmD043#C(u85ez`lTF=W8rkb9VA=UT+5T3a+u)cI!~Uo&*dY%47Q>Q)^t zB77}zhwKXV)0yhcVzw)Hde!5;wHLv2AeZpA=Lk4QUOZ^{aPJ|P%$p#K+8ti14huwo z3NKtKYKv;$anAzP@>&WMkPhJH`|EtLHxHCLY+aawqUod0G6ykF(TG?amb7PKlZ<5J zogTl#-6?S#05*rLy^1i>2bVmXyaP%n)+>(ng~h5Hai8rR6X-7jT=msu*lob%QJtwy zWAQyXBNuN&1!#d;);AUHO3(2!;?6sc_3An>!X^p(CGh4uP^*EVJdq^709vbrPh{K; zG-&!uUj;H`j{1RQSANCR&a~<52)4F-*$iJ1Z8<&fnaaCV;Lt{{-@N>AXp;|JVT8Te zwzUE5Uin?~?!|xF)vRs!+Rz^VbM^+{hG3vlE8OCoW6XdzF!gB!f7;(8>{{{tQ2|m6r4b>4w?_38<61=i^m@X}(bi0Js^~M^aP^q5xkK-hz27cYC+IkY| 
zR1KStF>mrcyJI6*4GnAu$rHTN2~za$sO9nc>gxDqTrX9?Qu@Q%lB&pHbn-UKm7&Ax zbH&z)E&r%$uD#{`F2qtdj@^5-Jg9xGv>jtOor3t2X~+oqx8TaF^9_2&w^MGvHvad3 z_&+?F-kRT+zPhY!Pu!C_xDkC3F;6-(>-Y0(HyiDR$!`KGZze}4XV@(l9v1F9BXUe^ zI_Q}sL)+w*tJ5FbE0RQ1H8SDy0{&1`KAlkJNV7BKDnxWa_e2F zr1qfLmGjMgsOTGn_m{>WsBrBbkxCoc3XVG?%73e#jRfF4fBW%+q!(k};r?elP~r$|M#~TA0w$C^s#xmCJ?iO_oo9B3r^@V~(+8EyQ95?Q?SC+k z>s}R_C@Itf5wgXiTISB)gXx&_%*RCskkB)pa1g*4Y?Hk2%Jv7JS`~CTBHi$>CvMzv zL=X)W@ZHZ=JOb^^ZcKrs4H0r-?wu5;gaplv*aom|`ILFPY&ni&+bhKIF`TUX&P6nK z+`PM?Q+iu$U$b|YYd$y>_v2TRaC54J&B5Q7&(y|cuVFq?OUwppIqiM4=C*) zGC#>-qyWgIOME)ULKSlL)H4Ry=$^%Y7n=pP&0(3KGILjPBEgMNNE%#q&2Ep7C9@mC zR=W5BffY>uZ|=;6J>aYJ6rw+WjB-Oek<*Kq&0OGvLaSRSn>Zu%S=r_Z1>a6=HyVMsVUMoDm z)ckWif};u*a^b~6gFQ9A%3oBcS&f{af6pL;cOu1X2gj1w2nluQt5zN}VHf6l` z$e__YDwL7Lzn^Grc)d6|7p+({LCX?#!B5N<;XdQ-lGkmWX-0)pouGbJUmHxgk4{?-mF5+pP;|sP0i@us&GQf3Tkk0I>_=pilYQl7BZhFLf;DayYyA- zV)xG@b%)}~{@M-@lTIxT(-;-*sBg`3OS!R5&66{ckGLY_Ev){>ydsp#o(qnLAb*(x z_R?eeIVqVoFtQU$lCP}OysHsPG`ay^$Z^4Dz!2+8MbF2|x;xIcfuJHKgOVh``H&o{ zRYwp-Y(aC#Msz2Qt#DiJ1lz;*WPw7|@oI}${zr_#d&i#35F5=0UCiu82SAW0>ReKZ2=jUORyssb@ElsYi((^P}IWfFi)k%J17)Cg{BLaBz+1@nnPFd`e+cv z`RNNy$lq_SqnU3!2L3^5-<*qx0PQrBN9qc1UWGW9yV#OX;vG^~eXJ zJPN+gm0yG_Ivn*MAlYOFUykEQih==t2w^E>mpt;#F2?+x5*HV^3`pjA5|ty@ciI0o z*JAT9i{OY<00Mz~QW)#7Ae_lpL^-JI(wWyOt>Cc03YL0xr_`bOGh4!j-v{>5Fb&xN zU#j^3!AZf;DLIxX7q5o=+^l-rR^DgwIb;k=Wh;-w_lTtY@z{F=FP3dFa?bw{dKP4r zIB2ueI%*ej=*s$>CTm6|h_%`YXzfJRFQBU9tt_m!WlW%9qE%y&F8XnwN3HohADN-=&(-^f3w z!13n2cn;wqK4OCNIl@m(t5@s0-F>QgD!yvnhbzS;SKB9Cl*>q;0GG=pkB2^{iA+!@ z9cj+77t!S1qzh(~0Rl-L(Dl2i#h-bWmIxqZzw#_)MXT=hiUCDx2@p;UjL%!<;m zHiBU~pJI~s1w6XeSQi7XPu? zmd7zAf8cUhS=Cl=1}Bd;|CL#a{++ASVAJ-;_?Q>v`pf(YSrkpoJpCfGop^2H4M2#c z5^?e08lKE^buLzZ4ZNN#i9}Ud(qv1fNc7ZJ^)XM8Hm`@kTZUz;R>)blZ#wIT7oK^X ztk#lO;|bV&|00*YKOeLoIK1=+SLE&kO0x}Rdp!%6LAqS^nf2RZssT9X`*P2ZGU{n} z!uQ(3TZyq`!1Mf>y}@dHe;oYiNXs%6eFbn@LN`CydnRzhY-<|zFWNU+(ec}#p2%-? 
zT8Ztw=vmS0FN(&Rcc}05UgkGGu5g>DgMFR-XRZPiS>#++z5;UDqrs@oI8!Fndoe%0 z0rkk3GF}boPWI}lp2gD{oYiRiC_V`mJL;x0C^BfbtBbeNDE@QzX+kf>ax)0*)O(cj zBe`?M_X=q(d6o&zsMy@c$@4!k6g2s&4F%#Or@iujSU9R=1#XHs5l$ zD!y;ghQgHw>61-*YXR%2%imoNf{A+mt1tf%@z;6xh=%^$y?3SCJHNWHtbO(a2S6gJ z_dCCV1OLfw9lJO49qdDVU+JvmJNtiBd)omY4>)=4(eUfrDlLY$#$e&PY|&e$zeE}} zB_zuBjHRRy%a3zOx>YJ|5D{YN@L|>ShV?s05EIBed+Xk*bEXN*p#!HTP03Fo)F%{& zc*s-6SE6h7H%2Ek-an0=r$SEEcuJ|*g5gXR(7yOPN}%M9e8fVKJA@q^yN4|d?+R7N zBKLgUs|PnBTQy(jzd!%YTK#dy*0@hL{r3@kxDhb_x!@^Y@)M|O!B6oI!_n{M?f#K( zPV{@E8VHz*XhT>TpqH| zY6PyEE!I1c+^bP0kc*tA05NpiR5NjqcT)yb`g`@tn66nAYS;k-tz4~nw^`#MPikY{ z9;;d#@qyH1%Gq}3*r&$b8PfKpOC5VJBWkX6lIRwWv(!v@1N@2e0$Z<_#c3w5tg+c{ zz#97?cFHXY#e9I?+8ywN|0afG?Y6aT{hYhl6Pd*oRQWGWQfcZQcjX-CqXNNPdY-pj zhzNArG*q{m8J)DXq`ZDK#y=?QL{T~r;sLS#*^HSL(L-lxjw}nmWt9tz2cPk106k@k zpM3slIb(j=1#P=pfgOpAIjCAMD+EMt>Q($-a}C&^2}b@01CF4FEQSA!e6p|>1uNEF zIe}xmKabJ+DTcZd+Z)79YQE`;ydHn5u;@Z-6N!8wY}0GW4x@d$6)VVR&qG=EZVESn zWzx1_HD+EG&fhw8)O>OJsF#0c)9HJ8VPCbz^?@M+)VW6OnTB6te}bCn(r)u&UdDL~ zU+;~+Hcds&VUp7i6phb|iib(7T1+Kp{jHbW0DAt&)69ug^Ut+DxbVbyq~sSB5EgDH zSnL3fsO(Y9o3N;+SnYorp&!5V*PxN zTWWA<&b(QvJ<%NwVW+;J(mS+JTWw^E%l`N488EX=}=!}-p$to@s-gvFL;udL?hZ&)Y^}^ z)8i*~k$5i+h+_Z@-new3eIYIYWVzNw@ zwm0qBPM-~@>{BD2K2tA1Q(wxgx!MiX>!*F8#j>aG6w_|2o?qIrQ*HfK>v#2DM??yP z9C#c1LcqCIw_KQNxj0imzyA5RoH3i}EmfyipjuH?H#ln6;^f95oL2@-QHRtKqWe*M zwhyAVz0G%{J`gm=UCG^Tn%s2a`Fl*)S@GfF^HlxB7Zc$%;$E{RPimL$9k4rI20u?- zKo|eRRhU9|5yMaK|E<~lz|IErH%Pid4ik<8@F394z8|7Gu(Td06Tf>iR@f5eAihME zdexTdv7sd2Da$x%lP1tDb1hnZ4$2(gg}g-wg5G$FS3t_eDts_B+V`e9VUQ`GPm{M1 z$OrVZE-ak~XM4Uu3fz`hPGd{4)4N8I@;vx=P(hAp-FXb{Z^5L*KJ!_}M{t68b=I1Nyn&% zMz7J3y5Q*N^I?ZGl{fOz+Chy30?EP;c63bHe{w)0;VQ;%K8#FplYx7Y7Y6N7wytYn z=qBa;0xh`G7Y(%~qjzATL!jKj)K@#*r1mQaen8$qW{TR5!($uN)@tZ45M%fNS3sDl zgIs{t=9)qAM)=E)*)OqrTYxu2@${`5=UyPk9Ow)p^nMXn@*A8?C!(KPtI>#xB~7Z1 zx7yCA8qf;bM|9*5IO+6jgOeX?0F%1Dqd)BhxKspcXj{|~IW*LVAY)!fj+-OEJnlOs z9j1yt&#B7%tNKsy+nDLo91;f9gWBTaT>Z^N^s?*H$fX4XK>GrEh8`yPwZ1G8J>l@$ 
z+*y?)nyEpuMnS7&CsaZH94w>uoM)tM9ABhfHaJ}1*c+Glqbl30*JU7zSsNoOxRc3H zL)d3AFF;f!kb~1c00Mo%w%knc`+a3R;e4>__MNuJp6v8Q9HLiUQ@FRbVyBch9wmpD z?_rbhj{g0H`;{MEpHI(`4i!eYjU-7F|9;-9^En2s8Sz8E%Wt`!h_;{;xiTmv$A* zYny=~%Q1&Z}9Q4i>% zWB~d6r`H>gE;^cRrksy#fDb2(bVDK`$-%)Ij_|KQq>oI9V(9!52OwW*{}Viv?91a9 zV0@}U63HeE+C|R!B_s7w>^_Scz&MCo%aRDiM)m-ThW}Q_tw`BkMM9#cUm!r??2$R`04=_f(eK(3=eIaqBHwuY=tRxxlgG zab?n6l7uJcDHaXdKH38-&`X-ae%R!_<`F;#bN?qWZEl&yDm07%@_`Mm;Rqayt24KRknWBrp8lc09fceY4Tf!1Dp1C=WA?h}q`# zIC;Ycvs$mm1sWi~oq{2c>j6FPj}|V?s@y$!!%t{1WevSL>hc2&dIsD%;#up#|1|#& zJp-GLbXz13z<(`;*c zCxCXiu|os#W4_T?$s2yr0ND8^Gcw5MPVyp6>G1!AgqW{5UIOogYr$0l>@XiJF$GMY4XZ%(xR>~NCZ{3PRw z0>^)paep+8v4i&f_@_S0UR^`fHMBE*FjO47IQsalHN@*f#50UT8>XzuuHx0Z^;Di> z3x(}@`W64xLr~eII_|dzSiR&nE9{I71(IfC(g$ur4OmW12=hGIv!ya_p}R)`y;p=b zJzd%p?$Ae%%y$5n%$7a^`-bKgfcxMsTea>p0jSv(3DlZhsg57t8>%AL^di6au z>%;!>s>pysOf{o8dy`XgB}F^TwG$fbojic)$EuvV=fttoP_GkrFL!LNSusZ8L9Mmo z`f#qUiN>)VkcVyRvx8!hK^sL>kyWI4$-fN zOqU&x&jThP8;dd_TIx5o7$|r>=)b*Tgnk&M4U$56%lo2&BMRvc% zzc{BTuHay0wj^QLsP}k_y<sDt%44Ys%@TTxsFlRDg{^|$@cY?@E zm9V2T8$}bbrk_k6l!6mvDgBUCLS0tKMVg-X5Ka)k|S+DCi7*S(=ZBVV*RVnPsDA{Da>6+d$lfFZ z{7z@V>712;lv2Z)FBl#pcGMnCY52K({Z@l)kUT@$B^8%UvfZ{2H&z;d)zXIqd@;|# z3VU0aXuhNPM$?|PP56BYy7C16+$a95jp+U9^^pY)n+wK2PGvd0YdvgFEmWpd*)Itk zyHRuRd`-$p1bQA&wBucrwknjMVq^}r?oRi@3J2Q}-##9E?23eozRdgAnm^?9Y|r{F zyX*snb0Z@^VO;x26+O}V&DBX)BK50ZnoLC@g~D^Z#kkZFWo%0J(j2}R`sYwIoC%u-M z3?eqatt4)KF932E`xjg5hjBKMp?!J@7{1pbH~;^St8WivdjJ2gvtF zD-v>>t;;EuvMz4rvLp$)gTaD$qBo@QmFPqGam>Iji&+nyk zKA-RR`}<=J8TQ_5uh;YWcs?#5_71ta#N5ZtL<;tyOPY2VWRcSvVjcUX<8xX!1wJ?Y z2|iu|Oui=a(wEMaKmYOla(*?xwDPGwzZET zSxuPPQ#+n#Ea)n7o~LSo_ID+%L1&mN)SEM1sQVu~CCXz!nQ~iox(mpp4EwB(Gk=qX zePz`_8u(NeMj!!3u{0hjb5XbB?ZN}N2=?CT>B8TTcbFm&IG>SUg||c`oD%y8h8x4v zt&Y=rV7*pWK3xNbjtQ{q{8ISvb{5JegaznIRnsZW=$>im!gzJ;!e5rR$U21WwvcMi z3=X$(@tPuA_jt!)Wlf9IsukG0xZBkJyGM$Q#nKdp9^k?xmr3Tc?M(IOe58{!%0VgV zR1wA63D@JpX9f&aQyDLT^#kIj*)Rf)79(p*YIjP*+Vb-TedSvBP?;PiZV?#Cb>ID|-E9VW1)=aT4W7F!?kfd=!e;*Qb%S!wX=SCY#c7oW%C{9 
z2v{ca27N*asJbD#(|KS!gn^)rVW~wL>h9XLfAr%(I5Y|U1%;Rkh$Oo&%C`X41=i^ zS45tGW$+2RBbOh9X1-#I z_!4#o1&4R{( z@kG_m`ff$5;Q)9X>tj{$-ix|Ijyzg{tgG2Gcy`UtF3mf9FgrtrnXKeI)2VQE-23!E zK68#9B5@i=G+`4hYf3nQQ%HmJ6`h37^`z@pgJJEqXr~W?A4`@}gaD;+VM7pR+LO;w15}wxDRhF3Hgd4=Yn$;fCf-;Y!kd|PS6AX{HsMnM znSdKw9p@`9bn04?@$Rd{EuP_^4%2P-uvf>7KMdVm$74-0Aos$low@5kqlIAhONdSH zGL9-4eztxe%(!wDFU4Cqsd>xVfvTRxl8Yz@I^Mc8r!8BUf<9_D;;Gy8AASWZ;IrJ* z^zq75-279g<{+j?;n%rJB~VTp40ZMKu13B&*oH~dg05>z!&^ObZ*5H)lL|y=IX(&w zGsr^IKTeAYw6SPWF~$2dnSP90&&fHcc8pRoXvlyy7VjYxN5Qh^zv=jP>nfg*fTp=C zjNDI+D*p7JtC^jS{RT<7+Etvga6wD*(ymoR&f~l9^08T!%moMvIhm_raUjRpQG{a+ zM8S{4#_3J!ptvXJjW$&WN13*nqpw3`iu>@sAioOYVhN15pi>~IUPPG;TrHd__5!OY z3paweAG|`ve$pXR?atCLFGz9{6#oTZmS!j4m3H9N)rbR)&-a1Re?ReR9sA+{GWA%E-jtd|G9l{uzp!G^8}sL&AS^w*xT8qZfqIomzJh3&kluzugRgZ!p|IB zy|=gO-McDKOQ2Q*9WtS6!dz4Q!{5u`l^(J^ydlHxm@TG*7bb;=Yv8u(&MPLvWjx`K zH`%|k$!@M06WA6^3AeZxFX*5@?2lv!ALgw_9id*#&eeeEoc0_W4v>BK5Bl1 zyOorZE}j<Xa4D@T=o9W-ooq08BTc{5Mrt!}DP8a7ZWZLcPB4tb? zR5-~Axp*ln5Z9jPDX=Y+2cw0h1H}hz7Et1Z0(^BTBU>lu@Q!)LMRU7fZ zRi+o$O^I*J30!nNQ#Nc9AAW77M55kta3Z*g^g~z9QxAQ1WkH%g5n<~<~;v_591iAx8$-hjq-0vp?8COcCZ(Zu{HRdGcfUO47Z60fZ-=xYveewcW z5uSg9ME?H$Kff=Fhra)=n9KiJElc+ifgb`_`ASu9n7B~Z`|xta+~iTaJ$|OJWSj{ z*(Xe#2TObZ+od%Q7W{_){U2i`O{(%CeCiNhnjcXCFeK!8$m2=Mf`7$31OOuCkgOASpI@WeOPr1QG@pX$0Ksb1;(NvULQxNakYBd~C2&T_ev?9aZ*t>V! 
z>EOK(wED}*wA{f%<91TO+x1rag*0qYs3WhaD1a`_AA!D9)KXS>DROQA&T}^kWXs|+qI;;wa>_4( z3o*Oq{Tp`$bW!qpNGgKVyM(nqZw|Z(I<7gnyv=+jb6)#hIL1eADCoR)d-s=Mu>xxs zy*R_nA=t}z3v~rMTXgnpQPz~x7{`-G&MTT{>pB*`&3)TutxUj%(3wfV(QJb_zc%tp zJ_mT1s&)~|;I0Z$K3`l`XW_6(Ie+jBPS z-G?Zh4+wg`jtGv9xR+2jG6{K(&4V)*yz$?|=)75FofhR=k6qvI%uOrzaSIUj`y1vg zqUiKZ&);{AloAzZGrjw*qoTQ!5sgttn4{>;2`!IfKu!pd^oSUM!hkD^XngACMFhsCLt)h;;NeB(PG_okb6tkCTJ*KW+a}bs$bdi=Yi; z`f%Cmt2TR{%%M7}Wr!9Ui`3cs+pl{LD|_-|={Sg?%fo3-1&NFtypwVyB)wf0jsb6G z`nik!A-OjvFaPQO=hKWNf881=jU~|b>!Gd^+;yJq>e&D4PQZM}ixc-%jOlDkC6d+o zWMu8?5>S9CnSa!iTM3B*6r2C)5^?8#^LaW&$RX16DK7Q2wdVXfwB9FiMSZdEXK{%6 z2X-nFH~=}x43HFHFJ>q!_PGTrwkyDe2Y5D=IA5_tu1@zco$IBmeywWTRIIwE&V%1x zOezO4A}&*lcKt401B#>VWz|;huTR*XNd2P-xDS(5%@a1DZvRrre=3 z=9gDFk4RUf8(0Whg|5{50bk0RK&RyQ`Z;cV_c*C0EJs8p|I|L_scEhHReIf=B8`!5 z!S50W@=R<0URIMA_fc*)`q^N!;_7JF9h@UektTz-LPE9s;~@ESI>ru(Y`j3ozYG3+ zS39HxkzBc1yS6Is6+k0E8p=n?#%f?&hNY#=zkT^vBK#S>dK+w5F9ijN!vvV_?)ZLN zQe-SkMgu5Tln;>TXRE%Gl>FMzrY#rItL4sVrw|jnV=M%5tt9oYsxy7?~UP2YY|8}tFPglnLY)ZsTK2qm<7Jv%F^Z8P+%-^Of&FW}@& z;0xwrn7fm4*9)4R;96>ViK4zf##rY!_DY9>(@Qz_(nQR0s`}_I#0U5FUt+dqpOsh! 
z5dHG{p}(Zb*<_(O#cf*y+Mk|@0RXqChL&Jm@QS!^t_bem2^w17GLZ;;d1d_24zCoZ zT$%(vnTz0(KNg(#>HM63$$>V!V@aQ3s5k&d{s1LX6X9elrNsag_yx#HkGwWxXMqta@8#&^nIgxx`oQNY!Cuix!V<`2NJy?LnO=+}SJUSg z0WGq>2>W~PQ4DnoRf?>}z-jIW__T+Z<46j+4jA7_YF zikP-QERYN`(iGu=vkW04YA6{2(=ZH?7RXPx$;q&V%tLu_)T!^O$<-^DXm>*MSEQbQ z*Y{Zr%Zw@QI3>p-V`f4gAqzVV-bRL2dXxd-07DYREK7C{`9iN0ixs0V zlyx~$lrKE?sP1S8;$Z|xbD`q>5Yz5ps|2%pTWeM>Y6=3coDz)nZFjZ`oSWf{N-2YC1x-l8wXn=U^UVK(Fr!roHhI0kK-b3DM@7X%`V4PiG z|LDZ%28#?dELq@yp4{T)q%G0%kL~femzXqu=B3h@e-#50CzCvNjzDgzPa7QbTNN?M z1vY7WLPc_8@cv+&a?j6e`7*tTU_O)cn?kDq;sBAVA!%Lp88Avs){+sl*j2&g_h}eW zo=BWI|MiR%E)y|3=kJ=+I=0S%wdGe>*Pk7ZzX7pVaPo=T@b8Q!^{))3#@n2GNz!~-7$bUICzcQ-#>vt!?8LK zKv%4A<0`C@pJ0FyE3c!{DjN4c?19E>I;+w?QRkw-fjD6S7DP=9kD1yG+q63p-`3Az z$D<#mzHZ5bQFu%nu+HXyk-1Zf22l=1-wA=m5Jm0V;VrvLjYck}ka}h$7ixBqn;fqp z){2Jr&fY^fziTbzmfK+KpX<`Tp4oW_WtjuO{T5+@Ub9heee1T^oarIuRU%r`umi9g z#T8wkeY2kw9X;+IKF29Hh9h6@tf8Ap2PDG@EG_VwTNCwe8_;lIEwG5f8?ML0aa1V* z%F~J_oM+i*x9Zqst-k!}g;%q2_p{e?*^15~aFRCjG6^o-gis406A-Za=M~7$S42<$ znuGA*^3hRF&(HGFR|q7ak>!cG1Dj?Ak9hF>{EkmAJQ?zk*LM~Ae>_uCKowZ9fG|Zd z%52>AAs`8f%R`rHp#z>;*H)rDZde<$CXIQM$Qge$keqYm*6&zvL_b0 z38}!1O$qqxN=3uHS4R-%qal*f*=y56HTo=ZY>jXKmdK#2pBaRjYD}|5@#+fTN)$;@ z-&NbLXj{?ax(ygs&swBzk9gISE*hTktGFH5N)zw3HmuwGz;*b~-|IL^5_|}YCw6*! 
zQ1gxiQKf)%GD9hAzZ@@5iZ{;m44(t<>rFFEhw4W`%3$5q*hSI_v@CqQ%ab#8OxCJt zKJeI~!ZsZFp}3;NCSe`r+v)xaQAM-Pjo(W`9vXj?RnnRA@hYXgh8O3W213LF@JtxT z+SK>7AZENKqod)c2(sw=Y_ml>IX^l7SjdZrh#1u_j#Jir**hPF^ZblziwqbS@aPkS zGXCB%Tb-*ahp{v)m-QRf>Tu?LbN>Cv`43=Y-JdR+R`Jk3PZkcpWMILYTVEm%^`8Oj zxr@ONr>xQs)9-}KhUcvYb+CU(G@BW7n1t%66Pdk43%L=U2U{roKKbt^N(g%LL|`lJ zSxN>kwv#oqRT`=|Q%?q>*RkL$`wXHwz%K%HyA;e0+#0f*PxSInjUD4{`;7J1x{PfNc4cv z`6uURp8=Y&L>Mao-*@UiUQRFamm(qbPk;B5B4uHj(!_@Ey0m!AmB60t`r)$4h?9K6 z4sv~agnR$Vvg_yi6_jbHU+KPiV4n4R9i${r@d}*R+AxElpK;{K*!vFe%q+x7x^#QK z=kONpnh#pZ!=QV)e3g@uv}}cH3H|E_1i71<$7a+al!3`(A*?BIdl7N~(_u1raH{t` z)9|)==hQFUW%mzTv_)9_M%ygVtKWn=dcD=y9pR_cv}>ok`uaN!n1V}S(t=Q=?QFy; z$ZNEmy0Q^Pqkok++swGLY-MH>gY%ySsxov0U2MYqb$K2Yt}tKCQKT?cs;x+B-$ zNj3a=s>D1^RCwl5>K`>;VLFZv~<-Vc=gDL^$3mhgf0Pn}ctoDr>j-8pN zTzQdpbgm@WAGK`VO*61~elG*fb@UbCSQR*jpAN%!yk#w^<`3$%jSXX!ezvGd$S zPFU;4w(&hp2pmjfABN~G`OU_N$6742g4I~rH^he7O7cR%h1Zo-gN#wkI56Yw$5jFr zO7M{0d2_+(CdX6suTN{%S-81fWwX;=-?Y2$y38r_Ta?GkQD( zgZF6P^a9?A66|R6>*?m-BDIB!-PT6Q38?uIf}1s8Vn18YNB`V;zWc3DZt1%>m9pdO z`xmS~qMxETlnE#l<3;XEJ1VZqJ{J+vV0_4j6Rm;iSU%!Qh3FG{xkhBWF_v`ee3bO? 
zsCo7zRjnyv#)Ay3m=jDIDnwUgD*Qo3|fr3HYod+o)Ny@#)5V8+)_1h<02 zqEJi~*%mu$+X5kwgf917B0rq_tCV>%-UkXd)R+fTHY4U-(aN!$+u81@d0+j(hwuaU zwj7JAcJ~1l{k+s)EqBS^@3n4r zP`-E8V%gxJBFR%pYZNa`S{+7j=j1VClj$9_hZM0sd0L5$+YEnkHxPU4R79%AQrYI%+^gtB!>zl6ADJ^d0OnpU4Sf-YXp z9kqLg?hJUd$9OKY!Y-SpT}3Ggqp8GL3RLe=_0fo){RU4xeL%Qy@QP*Nyw#S0lEaXa zkyL~={3tvwTMvYXpR#K5!2)qEKrhJOO$B>r2HM0=js*2&mKsCuwc}4J?)vQ9zFxLx zm%>Fo5fZLXESqgvJskf0L(FzD&YBuDX=Zfcs6^>U7!0dwlqr2=Rg{@p+RUF3i*(+k z#dK4olyx*a!j#auZVqOA4U1)!TLKMM7{zwBCh;mB{|1wW;fS4PDLTQT<8vf*nQyg= zgxoj~$wuqmM$^ufxlI*jyg9aTZ0MYIaTM`}2gWzqL_-ox6p{nG-v{1a#Pmk_hU+2n z2w9LZz3LTVO?hH1RbowSo&E?aD-QZ`3TGAA2V^sPff*Zg`OO?KEk$?9@42My;T&@& zd%^S@Kub`D_*9l7`nDs-rPnOBzJN_#WP(z0=8p(y@!HUNz@Z$8((=YI$#Z_)CdilB z7^vonzyy(#h*gRFOZE44 zqK`A6lEzs_E*APlXIq8W!rpSJcXz543$8lzH`_~2Au->6zj;Q!$=)Bdu#yIi})Ny zCqVb5-CosiZ%J;AaGrypI!|3%PK^F1&oh0BkVQ2bU`O*@1Smv)Q;sG1Y)A1>ULNlq zeu(z6?`*FP_k=0EcLX`zhFLx=P!lD`$t>+QcKf@FE*wo$Y#;gwl^OETLdwlIU!>fJ z)hF-d#O$AKV1-}U3{7vwygdfxHp1C48`N+20C$xs5PArDB?J=L+jVF*4 zPzUjuO}lC>aq}$7lN|Bf)`X2}i7SHf5-+xGD;&;x%tj1s*ZWmqyC(c|3C6)^@4LWgToF;ZTtSZ6 z9Jur9Q-h4F9RCleru#8u_pHWf{No%_hQC_TqrJ0dKaejanlIB*8BpnI{=k12S#bXR zc`fOB27f4-njD;YeinKzk8+ZkjS|~swZkjCpLT;CZz>^HpvkTq6`~x^#x>Zgf#gAT z*fgVRs>APi&qrG$HNj&CJM}{I&x2g6b{#Fil(Qg(LC zYO%GtcxkvEjr98dZ@J07GFl6 zOlg{G^?|P0=&TauKHLM9K|({=4Z%o|ug5OfQ^m2I*3Av+Ia!TO8F_B_JlNU>!xc!K zL=?q6@9fLcfO~$*-~6W!fhZlhF@u9MgPZu40Q=fbN{jvZ>=XM^bOFGWieenKWVz!KEx&4yy3cn+%PGmcGDkG?t>hQQ_J zffEEp2>ZZtDoyfg2DbE5QO*dTDGXlj(X9Y3@*60fcuf9F3}DEU85XT)s8w^>ZJ1MX zF-$J92Q!xT9N$cuAs<#A@|^rt@ru)Ym$*bxM-e~TcSo6&*l@#mFl4vS#D{?GNAZ?e z1$l&FF5f*Xc!SaY+cXAfmZWfXPn6LkFmI}%w~af$dc==judMK=l}M%VMJ_M>4zY}nX1)}2LKmC&{2uP$ooCOk&2()NpEwQ{juK5NlPGf*5-c!;7PrblqZ>fg(N0(lYGJU$m6TX4?=3l1=^D2&nS$ZULUTY_9Dm65E?f1sXgA5Ys0 zfKEzh|Efe_K0D>%v`8lnT&}TCi8KZr9(3nrXd?Z^c3g1?^CjYmsA$)p*bO17LgJU@ zASL=q%>FWJ%7KK_uhZF@Z-Gl>OQ9#&E?g+p5u%(HEV*J5-|ZAv57LSaeu$s|>v6yk zC{sY4r>39{F>Y$*k9~5T2X2H}-mt3e?8)RJZivu>jWHhVW}{%PVF=lk3)ZtpH7DSn 
zqu)zT>d{-nth_whoF;c!5XqqC_G+C-b87)HgsEZ7~;cYyacmc5EnW<#_BH49&$fO4Xhrxu||icjBS{4~^IAjkZ@ zY&{5qmnOi~#`30U)-~IVW97P&VYHi(1cM0Njh3rgP8wd7IFEA)T~HZj0^s}rFb9MQ zXb}6da9rmiCJ#3UZDkdm7H6|h*w!J|lo5aJA(ys6?QcuTlG~mjaxlxigZ=k1oAf!{ zKyN>Qw7Xb1;dE;dVuK+u8Kt)utn=Od(HD04OLfdO5GQS(OP!j;S?t3ub*8VY>@iTL zCkXn1y&`bZm5or(DG4#2irLwJdk>X>0d?8WCdS7N7xKD+cki%pl_DzBrhcZc=g>1e zvEdXx+wBHO)8ZhmXv~ZBDmbpydGulRh*Cf^OrN*)9_{s(q1O6qTbD95r1OmJRfbA2 zItk}!Wxx2{xQ~CwQ6_?=SCqbhyX&sRL}`)Mr*{AB@ldnKtfCm5DP7K}u(SE-B^mc2 zouQB02RqZI1`2GzN$0xj?L2L;m)uo3P+CW&+%Ac{Cb+@bm9b{ywBc0w>_gVd znon4JnHy4fKJZAUt8@Pmiv|t)msnO_0F9gz-n~wGk-UV87*tWT#DOmx@y~oxey_Pa zw;O%sVi-f2xs0o$Kn}iT@8!@Cph_iwU`@Q$jS%)zeE~Z|>T`rHLT^?9NlGlMufPdt_W*o6qQSO=VD-^1FJjpuOnN;H)Y-BCY7S zGzLs<>G`KOumO{8fiKCzCKUY`yP0OhtO(3QbFKY)LW$cAhPlr{OPS&he7d(L1%Hvk z*PGP?>aVk$U!GG{sj_Ia&#tR)C`KOMIzZ#<%K=c~ho zdd%mO-5`dQB@Kh0h*rEPv_`u&MbDe_U<_Pb_62Ie2o;lX>PQTS-(z|5sU$Zze!;Z( z_QYQ=Kot*hWnGI2i8lWNK+FI4y`z4$^^(wi!*XCWzSIiu-}8?W`Xu(h21?KMmmG8c zm~WT>j$dacfEo3Y2q2?*%fFI=?C-6)C-5A<^E;R_Ey*ABm*l>G@RqE=a?gAhq5oG* z{dCs``))zx@TgoM<&fdG4+*|-J7SeE>=obf^*3RH6kFjbk&XeoF?lO1RCAOxjtb2r zJ=N>4fquXkAf-D532`3Xh2N+>H-3y6R1H!!XIRiHiU_;p0kcy&n*`U2a(|B6m|1b^ zfqWJ;U71ik;u!V^egv|qE23g+Q6gafR1TvX*d%rMMhFTAsyE83znAqmB0*dLc>-#u zH}v(EYtK&m)(q9GS`{B$Lz!$ZN3{4+QWIJY*II`6+_I%2b~l0L~bui@J(vWh zgC;U7k%Fkmgnay!*x2|Y4_4W#Xv4Mj%2FpXaX$9L6h4HJ&o%9kUlfb7BW$qg%wz9v z`qj6|2ns3Z!Ho~r)&;ZB(QeaSD`qy~KX*rHZ!w=~!s?M*l9BD?#`JsgeeSL;CSo3| zBG8=%*JMi<(b7v1_Y!A^(pH?R8->b|Ab){fna6jg6rl>3@J-qzl8;-|hYT-~50Nyp zo75s=mG3kF`%G4Y!vEslb~KBrNPF|kVumI_5X|Sdp>}arp2cpu^pu2&GrnS{d0{3|+Sg)a zxPjR~)vZBp$QlH+=B@%-cX5n*5@}6)Lp@(^5nK1FGRC0tNaG=1*I6XbE1=n>M0#Ty z2oyMgL7Bt*qWaYpil^Guw1bL9!OZPg<|3!6O$n7qE?*cFI;COT@0M z9rRH`>8&OYBcp~QM>RYOO+*EiT^LZ?BD^_GMZ=v&EI~4M5mC2X02evSuaiJr?GT(6%|y zA6P;Gh&xf^)f~gkF?+kNh-hnlZB7Di$+x#H@891od_a_}(Hc?7{DbpjGOfpAi^{X0 z;GQ5$g);AmWtay2Bt9xrI0M3K1OsPlpF-+%rPcN86QA4sBr5QDO>-#M<0IDyP>!3| zd&TINy>r9mE7p(bb>NZ+#0};ol=_39ax2Y7T<=}=bnl+yj)o?6qgNS)Ww&an2FR7E 
zAT+5#;kO|@ywM|tN10pBRuymKPOFU)S4|TB=$>~<+-2E+wNH^@ZTtoD4~jt7Waq?q zkK%ecY%&qGACnM@RdGKU5*W0r_)sJ5fYlATkT*o5THTE>c%C}MUWZywd=j*ub02Sm zA|Mh{BV0!4_0W%#PU(yBa->J@q*H#z-N!%7a=!$Q5?(x1KFFJmnEC0=t0zhNEdud7 zb6eD2ee;3~8SYM#LuuEXB>mFiTYyNwKzx8Db1P3=< z*!=Za&*M9GCnnR6eTJ_RegfBji`;?TzdS$UTXdyvMr;7ozU!B!BRXP+D~FOpc*{e~ zVxe0!RrKcO?1{a*wJ$KRI)9FYf$SH`fvAfieB6thTPnBQ$n;iPM$QeMZ@k10*;SnK z()CEh&2IUO#@K?jpr1KUIb%LsAM2zn&^dAU!z(e0we|CCCDl=%v4K5XnOlu6EXV(f zNtf@FUJ&!x*Zks0UX2==E8GqO-`&?$Dh@FQ>#Ro8x=Jkd5`{Iuf#Uk6+{B&k;|L^b zRRGv{Q%JO&kRaz~@258Mvgv+#2un$R0)K)c)^W-03ezaMq!=5?jQXi*-@KG~-qI;` z@G>~>)dg3rX;0hCt%GqwRznpUUl$r_oU{Tt88PLKoUEiQQi||S>Y_s;3dkj9wLDjZ zr3Cx0zJ_>c>fqaISU6go;ooSsXeBMWW*o|Y?vri1GuB*_&P^s&7korv*yhD$EiZE) zK?xR%^f_LNeN4Du)2`nXclWu|UZ=W1yqf5Qfm3_r zL^L_k)Ht9XK=MC?u;z-gRNdokT_bxr0KSc=g-IeI!2ZX(cK(&#eTpuHu{5R1En$mN zC;W(PRkb%Ltq);Lsxy!LBcE|rd=R6!nv%c391KIJv^Nc8hP(f zqrHJAb`4^xLn~6O>utNX_U^37vX1ybcdt~U72Ek89-zl`9M!SW1ZqkOA)y9Yw<~N7 z&i3)NA$DlqViKc@4Zd%O1SGGJyB7wnLi!)jxjiZ&-~8rC+nb?MjuRqYS`;!`nnXH_ zx2${>=HPXfQeZOg%Nm}WvvS*c82eyLlO=H$1)85^WWmv`w+>ZM0voQ;zlwP7_F+eg z3}0tOAc&*MSV};aMRBsVLvg~d-j_9+88y)%GdS|4vwL;<8%o?;7EK3flrv*Sy>3wt z^PC(G6)2>U{Y=17|MMZ^bK+^`r}y!579tcuiBgU4ms!a71+JZ7iUVK=;oG1?uj!ax z@fO7qO$1p@k`!k9*yC;7<|_V|Yt}O0DjLC?ToM9r8;me>XNDn<{%E^0B`g_qXW%tY zSL96$vTMBO2?AU~1lu&&IVp=xa%i-0RP#w3jcNCAG!TQZxJIuhX>qN)X(F|FpKpY- zY{LXm1UbIMd6QrNxQTlAT$L4WO=3N=0P3;pTNK$$nRsvE0H~s%+)74@UWw3-SBW$q z5ddwzPS=gm;k7kf z3u>1!uh>CEoBlWLXJ`1Y0fH74?{NyVWCW4(*LO%@nbTj1;g6?iX}~`0`sSsw@E?WI zHXFS_y7c7of9S04PrjQ)E~5+3?^QvpHN~$(LH^WXQf&if>Bq*PhV0|bivTrsn&T8k z{D!!V-qA_*C^`r~E%fV)F9;ik4hL-`=XI@(SVBsxm~Vd1(&Y69?4;^4u})6RAy=7ZoPx3Bf#Y^Ov@E`^ zZL>M8eY66Ofl0#v*b&)Mh9L4R17eYk+{#NrR^uuWl)O1O#_9278jjiqHN&)I=)OGu zZIvf*Rm0Tm(NZ-sxhlb7=P=*`6fpN5g(Bw_7*(z$5j8xxr~u+iFv|4JVvvc0p4y72 zmDycE#N z!t$wOhtzS1xbjw>#%E$Pi9#Dr&D7I4_X=hvv8UFJI0h|4sj88Tu8?W`6NFb z&i-`6{XF1bkT4b z*e_3Pg~%)sBc&x;^&JUm||M-gfRHmN7;zn>!E_LoJO028A#UxOcSTWG8Q*F4kaUq;V! 
z8I}eR#hS~>I5R~cf+pO|1i{NQe<5BqRW|0V{M$2TexFZo@1GgzF zO=!D2y&lRW&hl{9y>)BoWO)@&8hwpo|798qZysqq%DOGH0&e|j#iHUT{AD^dq3wQ(l;94`H*T3Z!Y4X$mlI%j;wT)AH+E$;AT(=7 zc9`_PFS7Kzd*Z-^3qE(ah)g>;oQ$zUCRT_ABV>ciUhn51ooEHLjq7cM{G%W*3YsfUqvcD^Amrj6J2X}7|pFT>W>I@`X+oX&C z8b7+_2ZLTj8i#?nK4pAPnvl3nz)cLh=i9Bi#nsjOBPY2g6RZ_;cI99c>tyM#joTg# z(UU_MUqlR@K|kS6N2^uSuSTl@`1=y~9#{VnZX1=ao@FdwU7s57B|465^l(dYCKnhM z7ImkEWBsO!h<|i$`&9ynPAz^nNVGf(zoVM7Kq#ZmA`-`0qvZ9j=kOT-`QnF2qC8E3 z+n)dSg2I-K5f$>=bUOuqlKfLo>zeE9b%G)n7RIi@Hl0cmvpZTR&M2jJ)sB|9*OlRe zabO?xYJ>)gkqZ8*M08%mPjN9;PQO!pJB@nmOPrm$WiE3S?%cQHyhdr-?T+!YKkhyN zeT^%l^n9Q*Ns)3JHLa-CEO&%(A3Kmc32B9}P*I)d@X03AbPXLj?5&JQ$TcQeG1k!c z=KHCT5h8PW^x}vo(HC}Uywp_+^oQ~BNHzDen5FDHN5CH}bdotK4qc%25xK`txT0-u z=Gp859Kw-V^B2Q^rqZ&?2IY6bQTW#1YX57v-CT(UtS;xLn~WKshqXohw2hPNeEJ~c z{R}Pcb#swi5j^13m0q2+;HYbNuWC%|8Y)QnOj0nzI>a?EmawsLO>cLYGe|CKeWeow zk?oB76XEoH7n82`@uoXp~KxYY2*w7)2FoBCO1r|%6ujbNuO8fI0M5BEM3GQGZsIYM|#Xwx|+<$k_@}TCLt>U!TXG5YY1w{ z$B=TtUBBheDoXj?nKzcv?mwvP-p`*$1>fi-*Hy>LR($H;mn_I>c_0C?i!7b2YHG^v zoo6++Mb3}tfLSsTajcT`d-Y{P(;MJ+srt26DMj8G)}EhGukh7K$I-9V`xVK+0aXT! 
zBaLf0#>(-ejg%65BeRC$$$mYd9B((?H>v#$aXdOWIZ{dvh?*~w1&<4fSHfT#m!d(j zE~{B#^gtZqW|tP0Fe4TcDFMAO{$7yI3XIZ@rb1~}*OM_7iPC&A!!9jQI%L70=F`E} zgUQqO*l7)vf?vR9p;+|ig2UwjSksH116ea97 zjaY`KYdp~mM3G@f8NBZId&hVFz3d&jQX5;<8Ut@VM$&z#%RW7!Hxx1Z>-#YZNFWF36!qb6Mrmfh zTOU>4uK;mUSIsq=p48t-L&Z6dV()c8?(cG-Cth&xB1GVke>*_H*bS^Ri5 z#jo59t)I?*%A?Sl>BgGYXAbHfm{@YB)8^YjX9zztgY zjfxQj))RgU*_id}0B}AdP*t7)Du(*V-^=;}*$q)y$yworjPTX;sj-VPAuG>a6=#PO zy&p+v1oD4g`F(5{s=vn@MWVf27CLgN1_DBOQt;GsmtPXQS=TVW0^0@1VMI&K)!taX z_qXcMpFdDwn z#^)=x5E$gV5b~KMPq}Xc8G$-fcutXQOBwXkkYn1=)Xu$@CQ<1b4HjnFDwuj&fc2dj z9DuX2VGToDzGiMqnN3z#5OrIP&gOcHXbXVFNyxvli#D~MZ(g9_j=`E@pS!c~Hz&6m z`&Klva1Qk*-%>SA`l3XR6@EHOyOL5fGi9<+5sbq%y7vrjZ9}_bC={;5DNh`GNM^Q9 z2%{7ja&=H$pGL6Rk0NCZ8>4WGgHj}qLU!g>bPAIUJ45slk}v>mfkxEzltS-jYj)Ce zySF9+pN)LX`1(Gk8F5M1b6-e#r|EX?BJ)PT3WD%IgA$< zA9N^BR8M+nH9e2H&HFN5Q?RQ|_x3$~(^s%qc3kBm|G;A~UfS3fsPL0UW>v)BCbrNe zm@%nSfzLw6OngKYLnn_}A(4j9kyp~WKieE+&|;z>lHWwpdfNwu3Mm&THRL4pW6AJF z`^T1#{SJ!h<0xEW^2Rc|Yh_IL_j$tUMV*#e27Ap=Y>3ux-y9N9BxSvl5pJebCS?uR zs0j{{Xx^)b!e`<`=*lGw_El@CfH_zD+s5f8Y(OR%45a3aGx5r^6nYBJX>Gm93jrNH zAS5%YxU+DoR8N5v5|t~nmp}A=B6qinGj??X$+NBw0-S@i1vl;NRxV}nhJd3NS}#av zy?I@C>s;Sa&(rTKXT$`J`5E~; z$6Bs#Cq!-m`Q59&p3POY{4}fC^aG^=kat}7B3Iom7hgH{E?i+X32nE}JALoiTW`6q zCE)^a$>co+vd-_Q*spm@>TdFX>=pjMdmx)QwA3N}Bb)eN_distUHE6D`d_5&hf3n@ zt3RU<1)nk8&JmK8@Z%&lSNYVPhpSdJ+ zjo-sjf+plwbH2L@J0kkf!TvFe+>HzU>l5oxdp9!_-1Tac@~88eh|~+EB#@#$pr(GC z0p|Hr+g=pXNZFQ%P_=z}b(;<~m`q{vdIY$Yav#=(*KewRmm(&u zq?~lAI#z({%YC{UGuKj~1r$R;kWojCx#C4mBI{s!KUCo8V-SkSB1rfTIA)E$)LS<}S{-uOE0b7*gemO4hhQJ@H$`ryq?} zBF5(Fl2%)`;$&~{i=%XlW!L1^^ne#Ck2!rdw}0y>J8?|dDx@~Q!2|m5>~wN9Kcpe5 z@LksPb)G1KYlwvCD}OL%x?7U#bQpkd6M)9TD<*fr9L3%k^lM2x?&nkGs;cpIFUn>5 z(wN@KbLeNf?fU~pJO^}QbtMawu3@pBVZA2mTpvBrwEuwgaOO|%?nuhs)AVjCy9#dZ z+Y&Oc9(T`uO;xmHmx(-h7rmEmzQ_}es93T@4zevV*36jYvj^+eokcx#^^La3D>@^%otjL8fhm8hB3;cZF<{kI~Ocy8!wUV>o27EO;)0I63UVu`gTJOZ;~tg z-DekQZ`2X)=Oyr75^8zZa_pg*8o}I!p_WO^s4qL1CzT){VA;G2-VNQ 
z&EJfsoAz`1yJ^ogFC<=0GQ2ZdD{Dv&=1+utw=1ZO}j%?x*kE$qF;)*Vy1b~*()BnTBs#n0}esQ z&1CxZ9;K)@kO907Qvh-hU%#m<)NkBNdw^OlNZ7;B_lU7%qnx_+hvBus&RM~yrRCzo z{o$ZGeqV3)&b6W@fxfnN;px{j44H{ml=2vybB-)Z;MU`w@mu+0?}rw)b1)}nNPJd7L+{QM>Lw?8oRZx_TT%mxDg2)HRf?o5OIbG4{r zX6LRQ4HH%jMIGN$m@jLq)Had|cr(o8$1l3K%Bzah&D)eAx=@pXfx8m7>1r*r_+}vo z_7&m$dBZ9G^G3n?rsn5vX)foBf~|Cv?Ti^7oxtxkee7cd;s^@$R`>sN)ouid6FgW8#)*16lzyf1Wm# zQy7t7<})AZnI>Cl8*@bUPmZvbQ30wx+Q)5?%N|mbd+FF*KjnH@tDS1-y)olOZ|!PZ zmPi*hMilnFTlx30=ZwF&HzkZ4IvYnTk!beOU>zec?Vm{%=f!?>G*pcPYAsaJ1%Y#+ zZQZE5Cfi^Lk+B{ag<70z;KSbH*hUTXZnmpq`1Kadn+X>5tq-Rb2rw;|z#_s%cR(q3 zn!h2~`j4&ej%O?9|CO}=qwzg$e{~5eSZ3o~v!!e6>(bOf;RIIOVv8qpsQmYgPsBXV zG#sHOgc=!|2R-tdRkL_R;S{7}#nSOeOjcdxv>qb~!t1RXGcBFsth$~JYUF?z7ug|k zn}6VG*^Z30wQwXNjaHFIw~T-3?>~-73uI8ZfLlPZ6W^ii$8Q@OiWHU$faC`HaG@r! z+Z09AI^{3_Czb0o$UVeP51{;3d6B9 zNt0S5qwif*G>m?NDRN0>wT`>f^4#HJyP-_`?gbhZCc{)htkfx$&>Hvu$a?c|sMkM! z*g5KGpVJ~K%sHu4DoL^pb2=?5W$Gx(HX#W~W8Y?`m6A-Q>`am->yTtOWt${MMn*;k zgE5B8n0@>7+|K#_p5OIc&mXz2acQ>C{kiY=`}KY;ALuRAoZq<;cmo_o!cQoiso(zi zsS!A@9eOW?0sjmT3_w5fwp)~EXrDWTo= z;tlG+@=E$-D)RdwVz7`ZZ}Z`0x_>!}Hxu=!7jKCkfK2LTvrNtbPkF(FTfPEto4!OO@RK0M zKD*Nd2j{mx{hve2x0iUua91fDy#z0kY_&#R%r6WBB|%9SmKuZu?43MpjIXh7^!Xu4v(iMtnEDHQhI zXFm!In9M`ksu1|PBd`o5S=a}`p}c{wwtKc;dzsLMh~RPo6kzu(E4m0v5rg#7pBf)W z=o6VHe2@q|mwRM&nn|Xg^G?FshufWt8mly4_U#7r!HKjQJ>2GUN8$t=1yCh`fyI37 zKydM3)(&LH+7?+#%CG{TYrTqisLfwD^aP8q6e9+ErC5TP^!U@zA!Um~s1m9d2fH1C zk>v8mVuOEtYd)xuG)C?-5>(8vl1$srt9X6+2Iq-}=_lsLw>Gw(x%{Vi76ML7+F6o} zhR+;<3X2rX_2xCyftSY{LiVo$E&dk_nCc*+3T)>1#GR{=)LQD^rL!|HxpUz0_>|eS?08nkX`0=2e zf);`c;3%2_k*r(~XQB>WA%Fm0GLBg@Q7F%C)3u~hRQoK6>!VK4+6>@*Z)0Wn(ATc& zEQ6d-$cW#CJ0l}IH1JvX%!8JV9bFmJ5hyXqHS(oG)yRSe@JM&J5tIbhPo#tdL_Y=zs@ zT`C2KMml#|Mg%fc$>?6-;niG_Ioec|x@mbtilR%>RKMj%v_%WnKVM++^k6HC z=xC32#AS@T45bobrQ%s*+WcWwjRNqeF;Hxr7N2I{@HOtx%JFRb6(dqk1=t;WTzL5A zs3Q~Er%&CoqcL-d@wmoVY@m$S?zpA(5~jfImqfJ~Rs^bQsE+tt#?5p{5NvS2?B+JW z$ct~m22Rubm_fUm=FEisy!YsDc1G#;t1W?Xdp0&!YA4g4Gce$fV+LTBvqnT-he9Y* 
zFi5=I)rqK$!$kSz7PzAp5~A?6JEQ1l4?O?IZH;V)Q9MJ{mgE-8tYp&*!-l`y&UcZ1 zKwS_}fcIee@vhJ;N3gbXMPM6`kmCMNRnDw(_hA1HWGwOom;*ZXGt) z0p;^0th*b}udZsnogxocDcRM)Z|fBNsM3R$6NZdVw!LSI?+%=TFCL;6T;8meMwf>& zdx)Nm<;I*hkkQAG+&?<wJ3c@64K%dhM`CxyiiEGRPLQ8QY5dLq6atk}m#2!!;zg z5D#`}U_3GhH=X@xIU?c83~ZyMydL7rinL@q62wYTJ5#EgOZKdTu6LUu&gRFrbQkBj z!NPqRkZ!TNkXd{sY{S;4aIwCPGBjn5UI<0SAX|g1FMK|=m&c^*c4*}(!b*vH-#@G( z?ln?zhMP_tNUP5LvOZ|&bMnjn{4Z>ZM?&#s#G}kkJsXR;mdt?g%F-o0k+HQGySwEgGx`fiO?ia?MS^1 z%;p|ipbsoPigy!qAV^QDUPN1jGuTgZiBLZHLr^ceteVtq_evQt%^21Jeyn`(f=+DA zg&@7HC;F~@mGo5|=T_rop}qULO~+_bif2@}Rb~0AWslcwLM7*}lhA$oDQQkZr=eww(?7neqj@6VE7W_fxIE7X1=H|)_zy)4oRKJG zwmkmfqJpB#EAqbk$*^V$4EQ#|_Bo2^eW#^=S7MGz`&=$)^}@at2YeKPsX;89L(h(p zN>xZ#-*c5|LUk1Iw#FfIT*)(63EFy>(I+b8N39boTF)s5fRblM?_V|CKmT!{lK(GA z1oU(N3*h*FweSCH;(l!dNn9iA7RV7=eSkKc6D@5eBtna2$Md(4{%txn%E~%2c-ArD z5tiUJCiu;G|N99`)n(KTg~azD{>>;FZNdwki`&}~WpUqfE@7V|f<|1%FAGcrHV@|K z&m~4Vfc|Rq9sV5f4Ri*DMw`*yX2~oy?m9Tp$pAK4E*fC90F%Zvp3d@!#f0^jdxYKA z4@tS$*m+|E9#Enu=$Rr@i zf@g1rH+B>`pDnN1Wq`gD(d_k4^XNbC9>?o$d6DzvxZyPH8-AoNpiAps)vU!;I&h=O zwW@kMW0w8PkOKH>1&Wt3np0CHzg!)i$|qm61i1$ZWyYWFbhthzDvrI^cudu|$Hjo+ zO02-HCPC9kVNWE@5z0ZL2Hy;oy@JDzh7Pi3sy&oU{JaCG4NVa4>}=+J%M_iYQndQX z)6)2sZNs`-B*%vbx5n;f#tcz=8qD{*xId<9g1Q3L+mz5VnfR%HL&ZlG5|osj{)XOR zp7hazd%V4;sj9Lz^!4bX7oN7ZK-%h_+hRUHbm8%+@PxUSYg#puXf~Z*GF`CU8IYv) z4-~GjXc<@82q0Wjj;W_Z2{U@19v)0fY|V(TR)*atN9P+vSRpy(+_Q6g@+fKsmv$A8 z?gzx%mqzg-!G$3A%bS+Zn5M%eB_`@4_&yVL*K*_h#Lj6az+$;YAx~5FQs0Lh2j7J7 z_Egj?*c3SC4|h75Ak)ZXOSblOjAs^QE6{@24rn3Bff$G)A4C)++||K z4r=dQl~rb93gAbxX_ygb#L z-$5=Jjdo7-xT?5tp^AJ6Y?t@>8?skn2$H_snqo4A#F4JQ0=z$x?V=U{^POiGIGbnB z{mbCo{>%UA>HbF{`-!?$(y9ih%)`sm7PqLV+B!@1 z=M^9(>+NGiW;f z9+f_+;phukhCSg>INxN}M0x(y$hkUx^$h{?-XGaa56>0&7Y3n89E00>xxlUVuefWn z!dEI}&$cRuw;rC7flU`nyK>5wWev9^Ylv<6D{;C}Y%1nEnzvE%{BV(9TkRsTUb2kF z<#uCKMxqSGiT=@DozR~wO+fb^#lEam(U87&?jtl(*M zKIzW7D9S#Nav-~H95s`_h^mK;J{trEUAVqrHe+!lZiJ!&dFRP@WtN7V>O53O)ZNO-|cBS^sK?DaqfQ9 zNwa4vIt!ci9ezX5w90bT-m#=bS4wPm+2OOioOM<`Ie%`&H8W_~eo_dj} 
zNSR#J(SD3m1E**DZ>j{e{qpxRWM!iz<26 z6HhmrG`zSvqD;w-oU$StZ_4qt@<0Y=T&}(RTUwm6@yfeykx?mRt+dO!cLUB}{Y^{z zVW|fX777$?r)|ac{DspKKA^N)we&IkKk8OuAmpkonwggHCulB zarsS^@^6-H({4vA-g~1$!<#>jp2Gfi>~w3k*p8BbpAS4)xxX_sZfDy>RKp&dC6YF; z9DX;Gc`9zu+N>nSVst22axL;ko9~6h^H&|3#~6FInYI=ex7EL7kC&wMevAi(XNQGg zYV=&Q02hoKplGQuV;+|oZG$H}u`gOH+ArRXt{%DMSGa-dxfj1t{m1k^m}f%i!CiZY zTK0!hIY;)5j@a%+IpwBLoAoHeg=QrrSgtP!-_26$9RWRSlgCdD!P2vE)WVFKa>OcRWR^F$t4V;;Mo60(X+bMJWVeEz>u^h%7v`)2X*+wrCZ%`;F zDK^Nfl*9;EmyCSs-r7_)bpFyRETuz*c`Rb7)|s@0;JSZmtn>MkdE$%e0*o1^srOT( zKKJ)UPB1)6oBR0I5)twaS_d*@#r-WEolOGbpF+Qu3-4Uig?9t`qgrQoS@ef9Rz5(j zOsu4@U0PUkOq(R)V!yr?`#E3=hp zF^634sy51wj1eC;S@$BAj@sHeG&)^?wqIXdkVpMP$v0E82_zm=08qUbGNw<#;F0_* zI=8L_wPP@^^iqh5hnhgEL)}cV`dp)BJ+@1K=eKRAGJP83_Y7nMrJk59!7v5fN5VbAN7M+9{_ze3h+(_R1X}sdRn|C?OCEnPqb#zk| zR8(=k_+{ywtLxx)U_62JKWb=Fm?7Iu_-IygAh;#YzBvQJ-f0^(#k5@%1oOVR+g0BM z{??8Iafcqxh1I_JN`G5RyY|;d1CF6e9W4={2uN6-EQQZ#-T1q5i_YIYLrl-kU}EKQ zk}##d+=y^6X+@Z%Qg!$`Ci~y`c}>xoY2<@K#VSjr8f0BalXs6Mk&|SBR-^9oNr4l( zlP-zC{n4-Avc&iy!}F@~1h^lch2L-4jS{+T@4{qk3%t?2SGzIZY-bxd8IIO@^GlCM z^mdlc^>c*!*Gy(!*7zG5>)GzRK44g5WXD21Z*wZzR9xEo&h#Z%j29jvW>qKAJ00}6X8S+f+MgxjiK)8NSO++EyiIkV?N+l2!)C!LMKrdk4AwwlrE zEbWC)}tHRa{81WdziQ@fJ8;juRseqmTj@|!H z==8sdaR1q){>K)4dVC=>l`6Z9j&DYETF9vr3cPi9!7(}k@l&Hrk+Z>l_f_9upZJ1V z-a6F<@}XD$!c)6gfI)!v=?U$|TdTTc9fEZMIyxCyKqsjh;V4|1RPM5eI+E~lQ->t6 zLaJbs;Qf?+2+WvO*~_d2*F^<|hK8zeE^Xf?5Qm#ekCK0&W>LSYEDG-e#~gR82@4Ol z-w{20BEo*MbMJzMFfRtDUwpqhtxK&M0GOiKk1!A1pU>>ihKRXH6XEdNk}+`ddE%+a z1P(f5!LAiHJ44b$!8V@WUle3#BFg1(=Vw)?3yDo zjjl*#{JjH4S4rl$DQ>yD3cA)&3>5eecB%Z~+n4vtE~moZV$DF$3_@oO{@_pKe9dOn zDq{GdQ^wR8m2dPq+({>#hYeuVK;Q4Z5VlKGsQdSI$wS+583x3J8gfjsjn+)G(J>WX zx4Gf?u1?;5(0~iI#rgNY?v5+IpZ7qhfB`6*p4pW+9Zd41Fi0|Jo#*lWZ%%=<2WoI9 ze^!n=hf+Ha07x6CQyCAsb{y}9=PAzSZy)9UT`jm@PwSq*Sxj z##*P-?cw0Hkm!>Ub9tOm$Ry*FES}fFn+49_;Mb1EUE{xAum)+&hAO4NBRS7x@TD39 zVdhx8DOt#CGJ!IlA%Mp6K3*F@sZC31Md z_Ou^wbED2bX_2tGzWXV1g$`r{e9!c^8mhQcKeMeY>a1?%qdfBxhG 
zg@)&;+Km@Ts!%9(DoO?XZMURueCiB^l>;gg62w!O-^S?3d0?&UsiQdsVGI6hC)sm{ zh!e&LS~}~Qkl!|`jDhrUO)Pbcuxl0ELTbk*gJlWPK%-)y6EQ|No=@gj z*5S$nw@p_x!U4tz< z(<LItch+PlDj!R^!`OZbcu*C6@lT{(iYk)fmo2< zY}@3X0_&cftpUmda|KmQg`Z0v??v+@xLCW+D~OzFj66H?y(E8Z_W?H(v(E+;M;kav zzmYd#+#B>#_%c15D!YVF2tMijFdwM%UXLgofN z5En2z@h^+qo3M?)XSP6c)~IGK?gvtV&Jf1%v~(Xf!m;pvy>-@Af?07n-O z?+C2&c$#rUWtM$E3jgjXLW|bTYWvPhycCvvwz&%$OwnyfxPNs&Bv(u(Vu> zS?qGb(*SbDl#5o^DM}}SBNoq|6)DL($Ju&d&nw5Tg4wBe24WU;c!9Q!du{8&OXNDA zYC!j6pz=e+PBsN=CO*?UGaojpd$d#+d=xZeItEHJ$uiKQaI%TP6G_gq(UOGlJK*V1 zQ^=P7dM19Dnhn7@B(gzcu~M_eecJN#%AI?WrenMG3j0E5N#h*=+E8DV|FHn){+}>B z?%RJI(KH+WGf@-`ZhyW+MGIyCVXo8v^76o+EGAog#{-AV1I1tdTEAoXzwv1C|4s3t z2mD_-?75iqg&<)QIh8fPE*kvMUaWmj2l3x?o{^U;Ad)Je0o?*H7}xC@N_Y4y&ihs3 zG@K01P|%8)XN4|cJ`wknz}KT_ABg*cy~oRTnKbLe|G5!GIoRX%;QWEHu^&-?=+;vV z3pISKW~rXGFkwUuA>>oKCi$tz9~taU0$3xj8DwXY(VGu2ts z6xYZ(-$W0Zgfn$SR55A?=S%OzF(k-wR~<$25~70mszPT(O(qGK$L?4Ls&GA8*=0)uoN#5y?5HVx4dE zIY;@a8Q|S|#mD*97|1xPls=+kt}SA{9on74Wxf6|n~c)?6~k!h2+`YPe+6$)t!hJ) zi67NMLY#lQAgTBwKY03rb?|hd4fb-UdbEC8%s5B)Z6YL$td5S=_+teyVMDG8RaJTx z>N6hwX)KK7Y#|C6B7-6G8_E5qlfPT(PVckg-c;2|5dCCHtIK(L2epPbjdjGvV~wZz zsw%@dupHB_afCR3$5%G1X~9Ef?|?gMCG<_-lIAFf&OIoUJvlEI)<8rud4i?g<-uxO zvQH$U0^W&B4`lRf0~0M+Mqu5?heFW`zGJL1)N_^Z(d_(>jxH@>f^&s9jsxD|hc%O^ z+Xed(_v5=FR4$&AYbMSjtj3U4@V%W$q0$;Sia`vJ+y5ibZYV>>*jkG~et9*T6fmta zFL8IbUjgo2J1w%D)(=X6mzTYuFEdv_V~`*Qk~^=A>%QA38lbk*SPzh(Rw0ol{CN45 zyvRA}Gqf+wDm#p#yd27I8s2Jiwpuqi$G>^KcOOmYjnz_Op z=y@Z=_ov#O{u&ku0;0CU()E8kGEGPu;csW?$V z2L8=pmiOqhh1iv7pFGTNe9?Q^6(#ckSeBru1=MwZoVL0=UY|bBGE-F$&~EOD^&9fxUBZSSnbZ875`Ec7hL9+65Qb}gwD()PA!^*z#TFa7Ntd= z9&~Gv8m+cZQ%JQoNM@(6I|e>T7&A(^rk}5KeD#d`B@9FBwIW$y#aQAt&;(~f^HbRR)xj%`-$$z1~Y2@_4ej97$9-bV`>{J}~a zIctfwEwosiHg>JfmG!~`G7Rwd8f88|zbj?uFS#unM)2859Ofkaj`invcH6R#AMmNk z21}#NirsfnZCQ%HKWzQL<)+s|;rgMG+?PthoSc(9Y&`05)Xl?jhmYlQs}9<0G2~7R z0O`$5DMW|tJ!V|154xmfW8~E9WyZ_zD*APFuwc}@QOOEwATVoCcX($;J~Lp61pxyh z=Bc1pgmD)Zj8L_H1VPs%R;Do+@MP7!5icu<=re_*2sL-diuJ3*$8%;p}y!L3=Yv&L(uXL3-g7v_G1qSXm)Gdqp9^}d2y 
zey=zovp=a+=rXl`#R^!R#>Z*Qt@mpQEy$A$n=$>6&X&3(l8U&g)c%Nh)SuVh8>lch z#T?r@k<~JoRdbT^=7$*#OYY70T0FE7)Yr5P>g30{`W6>C;H{rvqkL|QxI3B^7X3|LoPBfkl!Z))BT%Ydf~KW*>TXDU+%HprvKCFQC6w zH*IZ%J_XEuRB>h0vE89#qzB^jZ6aWKb1VP0#OBxE{sv#IwRDymAC=Y;5gp$%s9P5k zX4mws8@UUh-*L91k0iYgfO>VTQFGQw(6NC-S_A`66*|J({=<%8k_|RCv}1-=?$Qoj z30eN~v+7)GP}_M`C!Emc((cmcU3t#okX9yFQ=#~hq4*V znW&Gvw9JG8{mgN>Ladn&F$R6h3*}R(I z?gzF&x%Svkjo)z=(;LkJ%gCGg_g1PxnG`OyoFxVa0IP{(SN|lK;lW%D2~Kp|2A6iJ zBDr|Wm%)oI18njb(!k!ar5}TS`+%$8-pBRIBYcvDYAtvsmXM7VWXJ<4A*;5^1LELI zWx$<>&GbqOz>8xnBEqd|S zG75WBrMeiqzv6d{9H);*_SSb2mhboJs{7R`c|079MRg0`@-7l4j)Gc}$4Qa-s1jA3 zdj<80l`}5=q1Oz%%v{AFZ^jdN`Vfjv46_PcL-A6pM9^G|GL7x#CWf?UAj>iaDg zKdPGU2UYt9nXFvMDV_17F6{XW|KMFlDjmn+gKWwz)0xz8N{`s~z!2Ff%3N3L;Lt^! zZkN4sGq4KSrbWQP2W9nRja11l#dpf!pH=HYyZZi`Dk*mfyUd0kDDeYHR^(A40j>jW ziegd^yt5`H;Pl-#b=q40V;V_*&>VtoC`0Mfs<1(R{y~0$+67OqxA-out*W}kOwu^k zoD?SI-7@2%AARLhG;=UGOskc2j(<+D!z-jyeZjl*PIJjoqxJkQE5x3y=%xLi=4i@p z65aS-6nsiBin?QE6DQJr+9ch*Ni~4{YLHfczhH?)$vfzu>;&ZK{#3vH6^r zvog*|xdVESUJ77L{X-rCt%S`9u7U#<%9`-l2ir%iBeJdLz2>r|;~`PD_AKKn7n_Jx zH6PeyHjC6~gZQ<^P*)*n-*NYY=aO9^zs75Bx$a{02b>JrCyL{zeM!}Z@BirxDcSsf zbno2BJ-S!OUm@!$(L1Vuv=Kwqmfa=-7Wc!-8QQ(dnUI!U&VCtV!Q0C3dzBBI1N)fl zQtFq)Duu@opLQSpi`^QPdp}|a8JIanWR2xj2;l<+c}z zJCikDPsHoKn7^E?n_Ezpy*u|k^`))ctqw!lQT&6y&3|J2 z-KTOJDb|h;3|Ah_^U}~V(l589_FNhlO*RrFM{05yma?~$e z;|y|^A`}J44crAB3D*?uZQ3@E^AZn=NG9u+_m6K;EM zRA1g-8TaXlTHRDs^<}Ss^#DiLV>PL)!z66EA2~oCdA2Uo-x5g!K}SC4`!Cm}FjOd@ zcNGv`mu;KpbpgZluvsX^WwNTaC=Q>Yu6{IL@qu2jECO)TZX5}Qss+#k4?VP~lSSh7 z*VYGr597ST>n+I5_Y9u#h(_Rv*9N%m`#=xwH|)jzv~QdUdWlT4kAD(~LK>Y>;N|Xt z0~%+Xz8roZW065+1JnWV8e@Dqvedz&K@i|)Y{wB+}ww2k!)YtRiX!bs{uow z$%g&ZI2Wy+={bVg->;KqfEvVq!NUb&slx9K;tT0SD2PY{Whlk0%eLk*;l!5lIn}K4{o~vN< zKxBJ5FU|8T7?Xoba5rKjK#U%9@Q-kfl%0YcZ=N&-*d;@DO<$+alL(U-@* zK0=*C$${XzLGjhrdlGJDiYI~(aPGE*EF#uJq2l5+cq3th3Qt2hVq;zYHD??hYaFt- z(i%*7uEsZ4zWjNniuK#Cj?zI)`AQ z?Rti=W#w)=xQGBzkZHm>?qH*hLsey6O{KywR~6$&5YBnVlDg)2?SLzJTU5%6m6vQ; 
zdxXatl6-{h!3_w!KY-w0p52^XWd8)-4pGQv1LpVg5>P}be4Zy$_{^)6jN5Im|E4sh zEvQz>DzU07+?ABn^wm#dmt$!FNbJ=G(QknbL@ii776z=75F(U7&m8{epDN0!q!2|^ zVpz118t3PN@!D_NQW%Y_twC}`?EXgB2XEa=2`Zw1DrXl&->K`G_?m5vR8>@iMS%yG zmK;4o{_cK+$CyR})~Bm56h5Nnl-_=J{6sZU+^0k}_pisf15K(Egm)En9vbghha}oo zF!(%3Uv;}Vjo+qcl|>4kkV{463so7Ha!pKL!N?>=lOV5-gVj|8ovy!z~JiEixfR5lI&#GWSiCN^$moVqkMf+BDvd zc-Or$?GvGc*$3}bu~n0=q7-_{FogtMV@TW4hP)FcRH-R;jiw$?e`-V;=e8E?d_1mf zIvhOa^rEGxw9gZl_IfZY=qM}6Bb-aMK=Tr^#Qh9+A|>9l!t6zypm$lWtfSBA;`4tryltvzy2XZUuk9|YgNz=lWQyJb^0z1i!7I0E_y%-N@QPS6 z>@WSjD{z00ny5B=*FegpntP4=8@)=;Dlt zEkjDBixK&!#=ZunI&FXRPmS+Rpermhh9;YeVl8ndg-0Ee_oabFQSO1)nu^TbRR<_r zaV#1IjuK`HvSJJKQq&Q6cyMrlIc!r`3B>6TbM%oPXNf1e-U=I7V}SG4$2$5`W4Y{h zi`{DKg!`ep&FYXQ;JGGR7i}f+wbxH9K$uQBM+zN4K2SY}NdSc4IgfY5a0^uxu-{X4 zYmIsagEpZANq}wz_PmufQRqa0gp02A+3RNjU+uV$V{798-qmR&Nem|%(i3M(CaFeZ zz(eem13=4%1aZjkkT5h&h4GjqhUb!|ksWG`cn%Fq_nOcRF&OuCJOj3`hUDhqgOfh` z7=RHWA4Y7{%gv`3s<3viYSIxUD}0b7PbBT6k8N?RW7a3UutO0ON}HcUBNuWLqiUbQ z{f`#%`q$u#E%iq5XwSx`#$teI&#SF|YUn}moam-BiwwAYgW8q?9C+W#T^4~w3ZG!8 zF#p8}eF}|1^x7`#=Bd_x&v6J{!zznZ2AcRkJC`8@Bl%2+#&J6#Oikx`NfOBfe ztE+Ro;%-84Pb_koXm%s#b28%gEFm~pIE$`{ip+G-OfKkf=Fl+f(ZD}N{g`>?Oq*0| zRCV__`s>qxqg6=%m4uJePS31D_5$_%A@Jk>r%C;n;!OJ2B0A-N<|R&NKe;yr-B?Kf zn)YAF9X-3_QpL~w^?S!^6=f|=s6w`9q5d-kzF`RVZzu-bw6??B2IQ}tqlA(hv-BrK2 zc?bGzs?oZi8sEaJ>THVNrIv1?Tj6yZpZA$wJwVOHs0ihV>&RBVH1p;9#}}a-n)@ob z6d^b#?sDovH6W&9{q&=dj!4BamnF{SX!YV`3+UIPno^n#`$=Rn~Uu;Gu|P@ zR3xxZTv;@FNt4tRy9#OWQUjA~6Lqv|?EbZOAB7Uqi-FJ&-M*R>G!1J^qYPM%vA{kn zs?2Nu2gCG6*r?10tWYcRr?|uH8A`%+%aTjekRm}z0`=N8tmM#>k$r>vIi8u}sY)Dj zbN5}p=pxT{ODc}_TxE?xd&F7ugIA{|*lO9@ll2AMB;2nJtWjzF3%7f51NS;;o>o%P z2tNkq$%>L~YKfR!GO5WItM;Pwe8>RpGHkdmaL~Tj>LV~O8m_`C7DJTJs{FQVR-~Kx zE-UR%I0rhak!1;yd}hm=wAJ_Gt>=gQ=nS3+PWU#wskg1P!Ipvsx1OAvDWqv!Igt8$ zbQ5asz`V7Pra2uW6#d#**KI-*bAZbbE_@;ip6+np(r@|wQWvQ?RFw#J(E*cwg787> zsx5_Cm$mbjD-Ei7uaq-u28r8~AtFa;C8Y%jSGx8biQW^M@l%5YKM1PBSx?#o0Z$jq zGQlj@UEQ9uZrJZf*6mb-1|N#ucM5e-5c(l=_>W66_XFp|?yJl+fe=P#9iJjz7PrSG 
z1M5tp6vg0-o?M-O^0x*sXqC|b%djJS%C{~zcqrGZLlwj3&-OM_PQO)}w|-frwxo_G zo4M!a`{Vdh9{G-qHh{Gd-kwp03H-wxjE76xmF&RpKKG?AQo5ttuK%8cW->G04$ z($S^(9%o~GksMt>qc!-nohiNfUU{^!GSZ>+tsCCiMzQ?`W$G^|e8F^M$-?GPcqc!D$G0|AQmXbQW&Y~1KjHA>oIe!lr@g5t$o7{^L7-%K@ZYR^HYJRqb}EE z0xjVZTO_K5&+Ur-EuVx<&rCy6RTAc>ns&EB-3&t>ExKd@{68|yiMkl|Bcfo!|*;^5#gC~Wbz9RH| zfE^CJ6iUyP8IfM0`Y^8huw{>Nk)|Sc zaz0592I`vx(Qk?%emt@Tp{0d+XRI{Veau{bw=M8;s_p^GpzCZtb0SZ^DtZ(O2YUo2 z!)`U>-1ou8vN^1x6qO(6H>S2|j&Wx&QjV70f9huVlJD)- z1|xopq+F_Oqe_e0A!;UU^r>`Q*I_pInqLM&8mDW}^2`{Bf>Bx|xND|ezvkg$4tTob zu^09~sp1!w{=tTU#er5z z3^f)t<|YmLK$GwBtfVjnfiB-DS{{4r3nZ~^N*7IQAwq1DG-*9}LM>xMZ)sxFp}SGq z01=ML{-O??Q*;p=EW37mGFI+MtEj7N!}@z;OlU5?*s!nQnZoLgm#FHGefFu)z$Ak% z88fifd28YYWCR|GLUR;rPg7re(S8)DezSoKS$wWz%Gvl;zQ`UTaL^*6Pj08T9X(r- zPGj|xNy;;Vp_)(6^k#Vd!u~md%I@gO$wUh8Qkp(R?T!t6_|@=^eG=g{Z*~eS0?4-b zT|@uvcJE`asJNEA4q6%hWaw}M`+VQx>I~z9@<@HP4fyD*(XXx#yX0Jc%qew&CL-;M zjYvs_#?Gu4buL_%;*BNt==lUIMKWG}Xr9-0>D%BpB3%r>Z$(2oil3DQ9=aNDY=vKe z%h(bw5*cdK{23hG+Rb_j;d>qpPG)!E?mHz^pnLHxrkF z;yXg58h^V3jr>~S)h$0YZURI>$RML)Ba4K64@Xi0vb=VW)Ytx zLXWptMZVPF(+_}sUwdJbml5;(Zz!^6HO`DVSbA)!B}Vsn|ca?3Vs_z3=f?rPax9|>zkfJBkBNH3YK*~5=wmqm|bK#K*0 zO*Pf9c6XB$&YnKg^r3B=cu_|g{nrPIZAeY2^K(tFA=5Dmtful#kvA#hl2yTqckw$$ zQ!n$=3+@Ie@KqsmUyS_5Bm2%A!FCA#Kepo_$3^=LP;Xy!d9e^gvB1Os_uqe3lUVKl zQpRtPLD=1m|HLZ)8CTv|a~vO;OypGX<`gk5@v38jr`G1#tR}V8vt|~8ry)T+$C)w9 zuzIUkz@b0}ARV#PT;3^28QafDTZ>Zys*5^GZ4EsWB97#o>AWuXg0bP{Q7-SJlDBp5 za4vOFC*PdFuT@1$iti`nP}un=ORf}VtrHSU;va)SV3y%CcO0F{g z{PJmskxUH}!+~>&^Shm|HdndQ0p7z=9Be@#?SHx5{N=96$kHtoHjUi@_>g4VosBmC z$6)Y(?wd8f3)vh!dqwK(+K+MMVH}T68kK{Y??A68u*(s(Z4hql1*|eaNktdG4%&_T zzQqjAU?w}RGMj+~h)tFT7>~MSM&}7r*$f8Bi3X4kJyD5&yZO!#RA{|vt8?>c0BxlM zlZ~9zSzawxhcP>^UG;tUaVcT1Iu}l*Ghfq!OHchvAkG8172Kp)krp6XoL%r;Tyf-B`<~E0)zW8R zaKAt_{Z@Qn%tQ#j$G#{9hUVJ;T1CNwM`VwL+xH5UO%#TB+^JICj}^+;bW$OWo#gWJ ziu824x~b7x%Jal7m=`!dd^S+EfMuXNRZ0Ekg|SxvZds)_9Bc=o}dQn zr$Zz)XaLxZ}zzc1RPY)uYyG}2SP%q4o=~vOXNKWlS2*ZobtB<)L*&WZnS$L~Kj3*}uihDCq!!o(6l_(X 
zFv-v4trRRl-bcmp*bSpUHLxmulq>W_O$9ddXf(MR6&x`6!(?J#Hk;5B#%3}2rB`Ns zl}46qm2e=Gsc?2+o?0LG@vIU@8NsKScW$4TzlusfKU(;ytcw?v*lCHlcmo6u;ZrV1 z;R1mSFF5z=5#gE@_YsWUrvwYJD5n)Y&s5#Ot8w5C1J)JM`(tnuYML=b>>sbzI25ex z)&kBIxt1#1-;Bph9;`2am$(0mA6Y&JAlGd8Tk>XMi40h0;o>61RCB>kjkeAUaWo+l zK91iheKRoMS1qbWWIijsIe=Ir6<;8ZsAS3PWpE3+X_0xrrOv!S>qkXB zOAMg*(zE#hKfcq1%v7%uqTXexBjqYQSvc#_L|ivOLz3sED*VQ={kAUZvqoab1HVkf zvw0oYL?p_obkUrr=T2j3Be)bo6WFYw0iK-+XwV*D&F~cLg-v?0uG9_veNOtl_Ot4rz@gZ$G%AGCopkvB zq3X@!q1@m9@j5z3D{YFxbW$mjgtE`+?UYKHq{xz~Bq3=+)|va1N=c>-WtnNykY$p6 z9oZ&{QG>B%Fc@Pn#_ZdDzAwE$-|z49`=dvXM=9f8uGe)vujhhtd?M0=v#`}3@B0Qu z%m;+XG^r-X_rR-lSLS3{hhx}N=E8b#xqLkG`p<>BM7br&*%UqCY$7T62(ED?54>p6 zGN;w9DK@m;f!x zO@RjY>oWu3FAX)4d_3O%N3B1et^=Pn5OIAEl2GIA&tNQtb`7q}&2F2Z(oj?i3Ir9C z)h$SyBEZE1=;}A=N`=OV+rVLF5Ao%rn|2AUauql;?L^d056=+=nNF8W*mKIfjP8+o z@Is^K5NWwH?!w?Fw_vEl;l&KyT2kSm=am)YmW8et@WXoyO#4-vDL*BI_5jl?ggOx4 zpae5qj+bQ{ClX!Flf&u^rTk)5BCv6_`)Ym9<5<9GU(bfq7+nDvN$Ya>|M|b2L84JJ zFk>UAoapj&|6jsYt&653N~mD{$tC1YEc7}j`&ZU|A{i+AT@U>#5C9Z2^4AA8kURhD zcYT75;{hK%4jd(xp4XRh)YPnNi7EG~LvGg4pb)a0-p9C<$&XTgNu-lVg!Zzl7% z>)=LyCM$IGHnn)ZsrpnM-m!|}_yq4GF(k;iiK;)})F>~XyGuRxoO=pZ7YUGm4fEDn zRRh{a{vVDnVpo>yC7#W9U)xH0fSO_&ih_e9KYd3eIc!%ww|scZwD0PU`}%jl2Is|? 
z$E{$>C&c}C6}I7(3Cbl3KAqepl5kWjRkvxdSK~Ygda0}6oNUbwd@B4@4zvT*@=!st z1GMI|)F{Qi1R3Vt%8}y0$mArXurjmeckd%-rzuXzBWszW3CKE~F|8OiQ9C{uvV-Q5 zFrWkKx`94%A6I{4k3}_8IGHWfqaYwbJXduxDy`l|{r*hSferf`4L%~;Nb2R|=tN@K zA9qJUy?66$`PksB4@xuP9c)EzQ~qb9>g6@GhgN*vK%@$)N0Mrm$(>nt2RopTW>)Gd z()|;h`C5Xi7DZ!`nwJVLO{xz~na@nwbpN#SOc3CFrECW9WgZ}sEk|KnX$Hyb2!DGQ zvSR8@oFBb%_&m`!IF%0Xh;CbX@klL(>Y#$Q*aQqwXhiK#zS}#@m!IaI60g5<0UXrc zVBJXLE!Q<5us)3^iA{{S?SY~5$iI|`9M8l21L(gFo8jB+z{4ENU^uMgYh}8&99tdrBB%U!otvJ1Gx8KLl zt;!oVu&2_7*nwIh9r8HxHW>{(&?b>;go(NaDVFTKpSb!ln=?R1kP)gFrD;QELSh9v zvB>kPIqJe=l?QjhY50|oN6$yfENPC2xPIt-w$hsdrhr)_JawXcfWi{VVAi;ToLi!LW!1ln%o8Q56^GBVj{58UMLT&t3{Mk~ zMT$x4XUm$ub3w#GAk+}D(p2GY4E7389~Q-~;jPnsgRFxs$=4{6eqThWKd^614}eAp z!Qo9oBs{lYYbJ4oWyO-z)=HP0C)nWQ5*2sOHuyC?;Itg%(Lw3b4VW||+Zn$9sef@A z3(yP#ESiUo;``E`N4=V2Xa4@;!VMQNalOiS*{tGKeVYUSxp$21DAZSk7OuKK2_pDb z``^l``;--A>`xhsdp-BIK@gNKJb+(4{~F<7yRH`j#U)7$(iQGjXI8&5*u1F2yfYq; z0ej5uTW_OqZI1WOlqRoVhn=Z=tER-GTp4eC&Yx!|K~K1Hh6w6RnEdQE`FX_t+U9n? z)p6RRD7vJK6&q8u~gHHN3#|Dct0vCU= zr>Z?mw%Vpi)|yvuf*yW~D$P#Zb>_lnZ8hBhawu8PoO%##A$1;MCJWK`aO=;xZ*Geo zo>SebDytagGwc&iL22d2{`=oWY`FdR3t_zwgbQW0*m_8#g)Mwmvk%w2BkEMtN)L@o zy(?V+X|bVtBotWSHDE(GVh74ps2QS0+^!KH{Y?bS?Qhp*W&4shYd(l7ywv5Lc+=V6 zzr2I%9=Z1ArJKlUl^KT^+Q@A&K4p4fx)IzM z2BwrK+_egcwY_aX?S(5FPX6`cp)TYi6R{_8DP3CZEvz}#Jb5hn&u=3y{J0+331?%@ zczr4>AUiP7>`JLibn-n@9gn9^-_(>~Rmi+#u$h_A)D}RiIgw%QRd!(;#uUf9-e*&HhmJbirV)Zr$mq=Q#T! 
zn!T-D@?js8jg!!IF;u|Yw!xjzB}Asd7$(T$f@_18j_>~vyAn^{qO0(mL@rn1@7z#2 zggEt%LXg)#oF9@8E*r!J!rm0tJ!=DPn&ikrk81R-ru5#F4l;5nO@S=j3&7svJ^G~l z#XqiPQOs^}0TG!onmZs()TT>5-FHG`{zQ?ry}imT+1^GDTXL!j%iq_2Zf4l` znxaVM4oZ*AsMO$##Go>R$5FI-)n1zkivNi2d-U1t2L(=A3Yf`|A#$&enVl|LPjXM) z9vizocA(W4_L>C^(f0IZHT99Edi*XiO&}H;rb`v#c>bfIG}0i|fA_n1!e1RRm)6}j zBi6H?{HW{LH2d)8j(Zbl?%cp0MYNTN$vvnPy>RS0*Ye6;yA8KIndc8!vvXRU1mr=x z{mBST>RB)AR5h*X>sBOkH?r^MJd$K*nt~AJEmr0I(^O^Q^?iiA@dM+uJSe{)Hj+NS zfeYq*;*gu~`^H6ml4OxNu75N|?k-V;)qbx>J-Wglt_2IwoGM^KTz zUQc`RH=GBYf3dDm(!eEkZ;(Wmmeex8!CXFh-&`zkwMJx0@1~50-U~i2LaE=oK{;-7 z%tR3W9qsV}nQhvaz^Xy&)~pIHY+D`gT3(I_T;w;SwrCAE*+wB>WXzGzo5n-xakg`z zjjN?$jt{O!S5!vr zh$s61ceN($!AKlhJD``aqG9y9oh915sU~klVvOnxmq7|O+3)q=aAE(m;HVlW$>5m? zqLI@f`vrxn#n=DsKAzK-*1p#mRQZqpL)P?MkPI&({{lA>Q{rd}H zF&@T&5uIli9X*gjBT1oyKG6L*r+{ql6LxfH50T9wqL1Nr@goQiLo9x@6OV)Prs;#r zijoG5lAE7I>OMIXIRi%8stC~xgDprBXr_G!WfY00S7P?7CbV3%aN0o8mO}@MNR~hqT$0_LfSO8sY^n3c>zc zpWGa_yT9zh{XA=?n@VHYcUk=$tiIyjTW5AKT4Wx&T~IUYKr94j_7i0;H5iU!$svQD z0=H*1Yam)73c*qq`5sklKxj@+K39^Bb3tbXP$G~(^k%>zl!qAuQyE^w<#}ueL@xzT zzyRY$QfzFO*~{#Td#0p$b~1D1*{_SJF@ilGc`fwe!rrfS zfe9m!tE^PUx`&V>fhx!gOMRRA)#c0uTX0$dxMw6gdB;(RN~Asm%l#NvU@t3o{q+pw zy9XRLvg`T^PMM7(Dx=!7#PShUu-UH%qccu*j93Gg!d1(_2S(B)K}hByQ2vP5D1}Bp ziOhs4WI^Z@4h5drh|J*`5Xq|w<9Q#ps|z7uuX+@x8Gb4k6@49OgBQk9n*K}P$*HI9 z<9MSr%z0&TGRU;^Y?i^`dL%=d9VG>IRIMg+=O$#3a=~^pJx4(woAPr@CzWPu9;isX z@>VsiM0RytdB9Yz+j93CFRBZ(0YoR1B zh3ZC}2HW+)fa$;%J0e4gsNh(hxE%cNB2aK;aMr3Db)h_4mGASCV|l3*xXLY^lR}o^$bIqkfqP`InxW^MlwTw z#`IJB?gY?#+9MGHnd12ZK*{E>nMgAfsn8b-t~hO!Vx-rRR1Vc^h^{n^4!17`1U1mA z5*x})?X1NskXC?qf<^)WXd!d?_9Go16E&*(VOLrh<#gG7btmHFYL`9FQ zf;*sJ^(N-PozaAu)c|F(2TvF(JM1FmmdAJa>LiG9NCp~mxxVW4#Mu#WE1Q5hT#WFc z=6=&{2&y%QIR{KL{F*oz$MrbMJrEINuSt$1ftYG8I_n}RW^AA&PfZGS;o*qj20GF$ zxVeXFcH8i!8i%15LRMKcVMaOf)Pd-W1^tx+c4tX~O7k`Fo3`d~K*&z&(Z5fH4Up(c z@WDhLb^)i>oP0U|X z{YsrGdwx13li+Md+H!RS8!uoIg1^E$qC$_SIi?HpnLwg5#qoVC zxO8Hvg2Dg{6}boyaJScNxnV;}Wd%!ff10Z(r*gH}_ 
zQ#2Lo*#_#HISiRS5fwfi@J!!|fwAUFiYfxUrHZ;NTRD`BT%Pei52jD)TOBv$qn%&F z$7u-1Zj0JjS-1o~t1_-B?~WUSRtA9o``L`IWoy76Y+e{9d|0t1r*>barZ(}zgl_FE;Ez|#_%`3<1-M5 zgpo`Um0YeX{G;l*&PvTV$ZNzRx|joNi@=4;r5)q_!WA{C=ErW3?njnUo&;$;grPeC z#E*1T@b4)cr#{4#%uxD2c~$5fft~`;=gW zS;~Ct=|)r`F?TXue*nU^J@Scw9ARA>OalqL2;VfWbh%NC_~g z|5{TTHLpMUdrV9O5;e8+e^^(`!JjSm1&gr%27xZbpHwb<{SP8KZEEqM|0)7^ig8ea za`o0X`L1pY$T8Tbo&M*{FL=zzdU2uyQ3U}ylEa{@*cu#?-NLn!>{do@O8!(&%#O6^ z$cVkSwL@_9@MYZ3EIaW8IWhLw%q9o5|5T|5%-}1{&?S^IU~iH^ z(Ud|b=j!h6+GxJAofF^nWOlC^?b;oNAb>?%GNr$(=UOROoHk?5&STF>lqYZznU~M% za@Tk)1~0bT>$+Wldol#|3?R4x(~7x--=AL0i+a)=SF*uig~*4rbb7WVt_QmryY9PT zm+aH1K}6{8AAaA)5F8&nVo(Y6vu(FyxgBihhhP^54~k5ea5q_mMHfA|I75W4CX?n= zzhm}B-LO9ucf>s88YKaZMup-;O3r9n*IY%uNocTw_<=Z`?3s1m|8VJ*GmRdfc@YIA z{r$r39i>C;*UEj}v(1@d=YSzf65$VMjZCAiR1^>&=efIlY0;6ro3%S)lK4zAop315 z`$OsJ6R9n0V^M$A-CN)lvZKqj(&x+%cEtscV)uQcDr%m! z1?Vo8UAMuvmCrHOL_t}X2haS|I*%$i*)ma)zhk2+O~qB^$r(aqJ8y$DR_zuk-A`b6 zPoDa_^ZK9NPaEGee}>pbAdkb#CfEA*|GVhwx$x(}N}6qtEWo<0UD9I97O2rlD7MtY zbj1EQgyy8U=OjzAcTD`HUM`1x#Dz7BY{O|>7$YQ0+UhB*=IXSi*k_k4IOG|lUJ%8x zzd54$V){*?yl_)*=-<#&<+&^c?EDb6-sO^n+)wK^uC|^_NWJ%`aS6 zOR-CE@P9>B|DL!jgZf&9Y>H5^Y5bN}tAm7d42rI$I12jaTOLZN0f(K~;v3H_^3G=})}gpSQAD&&VQ}9IHuh z&kh^WLk~Y1bFWBb*ojzl4#5ff!b+br@f#_aNV^gDo$A|1`wwQ)>@#ONSoHMB#m5(( z>*8kxvM^e?o3ie_l%bW1U)w&`IBTnlVku!Y<(&LK*ridCx1|U~=Jhn5D#3};4t*X} zCEQY;rWN(2?EX+{iRQK!9FQ^PG6Z7a?=9banJ{IXK2(*p|A+NHRGEDhTNajtwbB~f zgipNql=t;Mdk{ywXa5JTf_!vt=&hnA85r`yKCB^1J&*`ic7i)mPyQ!ETtY0n=}IZ&jZfjQiSqm_ zU1tKJ5@dps=?tAa0m7eZ%v-k#-kOZ_(0jo#VGj(@0m&&lzb*L8gNf(Q6yT?1B*jbF z{2vAe>b0R>Ym#c)2Au1v`c*MV!Ibrl7N1zk`HPc(*wP(FhGI?0`id+%NE=RL@nq>xk*Hha6@9?NzB zi#FUhzWqgO6v|CL_ACiU<09`V!M-7BL->!Ys^G0#xa zM4?|~L?8am((Ib29ox-@aXt|&rFiCC>I41;s^?tIhi$+4oKI$|AP%<~Q>9X6^`;xw zT0}dM-vi-5F@x*t0c}rxsYew@&%d?PBx_G&2w=+;L7uU5@V@o!qs43Q#nWctn}9U1 zAzY}0-a#iW%h_0cA+S{R4IpYGGpYp8u%PnPHz}mKQegJ!x@qEwL9+X?NB#G@#moxBbkStoOO~~KXwat z+qy@kydUziwczzDHjqU>ozK)9NR#*mJ}gHh$b@7DUTVmkxzAi4@i0Ww_34`TCwA$- zRrI!C|Kid_%E+fXXdgL)0?G2`kBuVNIT#-Pm5_ 
zbKu1V%*&9R*i~{)x9t!Y40N~gHGFi#tTrEc%iLux?PEpgbied)YGSWPK;WKsv{NZf z_`5WcZ`w#)Q{ruc{=53#1$)YVna(7(f=!AGpYcsKkC!-xC4q+mSg}YMBW&e5XBTOj zWg#*4gL6!Hg&7gK)j@qx*k>x;Uegx}z4OLOV(ObI53d(G0G%QRJ!tKFp5N!YabJP+ z)S0{sZcQW5q!#%*LZHS9NQj2+>2C?JGfrWjy~=h?dEL?i79 zLG(g{HDweeykON~_r198oASkft=lN`G|*VaynU1Z|JXL5d009&118U7$@#9{hkb@l zEcQ*@`QaCr9BAe82A$XYGGEDY6klWHh>STNym;laXdgM0Ak><0u;ORc-3_!D3gsoE z%0nTzoPAc`TKY2J)v^Z}5+=i18^*n<+XM)lmx9fP9Ix(Zv9}dZ#YTc9LmI8(q2?`8 z>vBpR(tnQEFxYnA?1hhu;fjA3Evrk1ypp94_}9ECw)^HF4O_fxGT`h4N!d+=-RX)B z0T6Yyk->?T`W8A<1=PRJA|$OmCe^YutX?K5j7uw(WGqNwP?p!Ala z(DjjRp<`eGrj{X1+iuA`^u@)0L$P3s+JQ@%_~jDUwXZ6IOn$C zCG&tGatmi!jLY+WUm}>9 zw2A0KUzF{=2+o|Om;j})u=;eDkXx|-$L1@=k)Ky|{$p*;?z$dE*BUR2G`P+kUxf1Q zAc|C@(15#rVr1B^X8W! zyH7}=NlSLsU?ni8XVbY{W?9$vCajW2W|WiJzJ9rF6DazEyp7A)Z!kIrm)h^HOM2^4 z5csmrBV&sa!|j6g33c6;O@N=mAhMGw1t&HRA`<4;A2pqYi7%-192m-4GMe3XoVs8h zXOu~4mO{rW7Xpmk!$b(wRPZZ)P@E5N#VMn_B7~-NL7*xoxDaYiAJ`~xM(_@*gOY|+ zg%Z z)sa`IV&T%T_H^U01Ii-c_#YtBMzG>BrWw2J`5?0e?ZfaW27~V~VUM|v(bKV8nteD_ zEgZ&BN@fzb!U(FoyLAucJ9sr9B1E;52PFRb@&7v{tZRJ~=seDaFF(Hlw2ZrnZRHGW zt_QCBfKrTnQW)9+D$V2Eka5+B0O|sG7p^9Z*in?5@IFxYbT&Ury?;PO{^oWSXbq2p zV=GfGkja-D>}~6?#X>YP%gvJmo~}EG0BgZ_9~PHVrcbOLq^@b7RyDpWR`FO#0dhI~ zk~dUM)8uqEB6t@7>?ELrSJt;83n@7TeA@tw!FU(Q-(3ddpxuzXj-m>hw(F&)TAsgp zG-4%s>h;2k-U(Iju`3_lz8i<4jKI1K5Am`Z3gFxrBH7I3QYw?GXsuKMT%hxWO*rt^ zOW-H(uT^V6^CclLu%m30p=(}<7rwJJprVl1K;-k)J0Y*JW~cN==;}H2#2Lg*lC`id zVS~wtJrXULkqdkiNKgS{7CJT}L{#AJRS!f#H<2z-Dl&)MQAC*W%BxZ$NAPdbYBgLE z@;EmRZGS~QkKqiww}u47q`9-$O zO>Fg{j?$gvecu1^rKE5kT{wnQ6R^rQEx9qU3?%GnfPiYU7qz^U9+i4D?m=y=hIK0s z4*O9$R#p7SOpkvpt7R{=u2qTLXG~G|ePnv0w2nuA-I_#p&pw8BzwtU$eh=!#?_rqW z+M*SO?;s+B;%n~f%>T{w04f7HMvwvmTHol=3u~9v@NAXf@^EA1o95BlhbB0Jr3q;8 zB5)ysad-V!vwWFmj9vG~(VoGI{G6gTwigKg6PB56Mu#Gt=42@pDRBiPC;5 z&=%-Q-*@e~m;1rj*Q-%{<(hppfKF*$T^?rI_v|s?lWbx?kB*~T;*mxZKoIR5YAORy z?35kWidArPQyc{ixZAN2g&ItX;!RbaHK4l-)no!L-ir^|XeUK;rIr4@q7!@2Qwyvw zO?FT13gF^B)Z%+xx=y$$~^qQ4z%=o*(G5%of@wR##=y;pt{ 
zNkkH?3+ay4OO?1d3~w<0mQX*1E%$MArm?ALTDtI&@!=P(OSxStEO$J}EmsFZ&!KD& zZFifkxe1pv3eSOoqklXpi9#U2OY=B5_{KyRCm;MWiv6 z0vRyQAK$Ai;e+}sQk|794;cy5a7lwH!@9miAe@6G<5UJornC=e?7wgDm90(7QJVwi zsZwgBMJyAs$HoaKINepGimu zuuFCF-v5f|ULIO>77UpGw-o|T`Y2fd4Ar`J<#wu|SFhB&W415wja>h`_yJ%|=WqFL z|BW0+{AallQWgmEZBLzC?2F-VY>`La36HIA%cWq6!r-`@Kq>y1 zO46DX%~NMKj=YV2-yA*B_TrCyQB+sLA5kp&fIW;%mKK`~<2JTi5v`gpT(`b<1{e<+ zZ+r+A1PvErC$ae7{G)LpO|r|H`#wyb=UycHSC^IfR>w(@JL>ixjb=e>hk`a5Pt*rk zP43-mWw1q`Iwxt5(ql9dOWSS*x0olblN~_0m=Wq4>R1(>1)vbGFl+byuAP6 z%>}|c@8>i5o{KM+=n5LI)0{r+@KFfE5AeT!XKFO>I~i^hkpUK0?E`<9i*3%=-zc#zI1{8OqnDOe*QLsSh9ixx ztte=E6lt1armm)o87TLXApaq#_r&?JjQ+UXgoHz42ntvPwpXjE%-2Ai^oW973-JT{TYyhb{rdQy}HQZrR$T)zYH^>J(TN84e7?fInHH*?fX@gC#0$R{N*I>~`Wo6Q6{5y&WRPV% zy2(m2G}9j~0G{Zo_ig7;G8qffjbc7RmR+yBcYa9IvbkJzCVQ?t#PsiU-j>}>?}^3Piy)u<(1-)ApSfj#{ir5+#Y`a>CP zGLzNC?x_5zSR(Wgpk?`DI-I>3eqnP zMtH1v!q#5tZanoO%YCEPU6ixuXh`TelVC&%pLeV8-$kn~?xpk!F`fl04{uzx<0;y+ zaevBe0i%u((EWAk$=`AeZcRJX5F0*5Ji6S5DtrDC8Qqx_Z~=7Lx|$tz9$m=5g8eGx zBcm)TttweuD{3fg5I*n~ z`c{{$dmMmrL)XHC8bI-T6DKVxtLW?5^>dH>G8Go_8E9n3_4z zG{@X;E@L5I@Mxq^T7lEb?B~G2jKX?CD@Tv>#}gj37gG=1r~}<_Hys}%p)HqY25tD_ z955h2vU@1q(3=2ynh8IU{w3;DL2>5c?fH9*RWD7T39WaG0?9dA{HVq{avGIu5!nBL z)8%>ydQWP6lKF&qG73~dN`1QYfH;TIv{QWf_=T8oas7<4n5iWRf4a0gW69m0EoPsm z>X64cZN@ZWe{R8-tu2eT1PIm3(SY|#LmpkpGyPg)Pi@rk?o~uQK;P72@~RYd z0n>y({6A7L9c!~ghlgK_(#n$M2(u{M*wM&|m2G%bjy?4iD=A%4tH0|KUUHH}$U=4? z56%XEt=aerH?QN9MXh8P0`m&A%VgPgrwz8CEIqbm8G>ujLm79Z4w*F2?wd({ERi8_ z*H32Zc1jBq8iKs|b%6TXq!9>~8Dh(^o?|IVRiRLpYHZIqh%CD0Bgp*c!AQ26;5t+A**A7S5 zuwH>?Un%4Yxe;p52Q`;qZTt$NPqbA1S@~2)y-0=Cyc))vioxA#&NQ&``D?Dv1mSo1 zw!FtMA^?~7-Mg%OI`Fw$lb_!iIw_{aoj8ZgyJ=Q)aDBtBntTL#GKR<}|IP?;_`dDl zkvaR?D%epepngw#nvoIskH|_Dt)B^@yyj~?P=ZUKobksrUD#R@rDpoYeb%Sm+A91+ zd$g!ncb{i;8t!M>MAYHeBLznOiYMn|HC?1F=JE*g9=`hBl4w>sa?jRU+w<}{^V^z? 
zC-e%rnrB(M_B@jm`pSW%%p8`gKV0YP5Zbp@#~^tSko5h)3ySXY*+{sob0_!4_mtj%AOj`a0YWl zP}oumbT!}$4S7lYMZz-ZieXHlnpUVnwmvdYSl$BD_ zQ@(>*Qn3=fdi=#V`>WRi6PwXmcb`_JZs*=CAYDuKy(=A=uK-8C=A=>3PVzCcq^)n4 zq*zoFGXa0d3!rYX3s6Pi^IxLt1>yC=uLb7@?v(`(;^jNv*VK7HfKzMMCxI8`tSgGzbz1&DmwgmQ0W|TMPj*wSIoKIP z@9h_nTQS;+y&nkurEUt+w$`~y-wmmoS2<6?jY636<^QLCdIPk5Fore-U=`Nsu__gt z0kZ$tOu>%Vg1Di#N{KQiTsT@81bXX)L4WBcsEWM>620Z9R$cNv)Cm!Dk!|?na*tDg z-nsSkl?t?lT4M%ZfTCyoNxj@JN1Pw9f|@BeGAFql+a#V*UA=ze5UdN^A^2b##fyF{ zAUGrzSK7r`vvf>ipd^swXUr8j${Y%o4P_!2goK_76wX3c#Ux+)vm))A;?Eb+C!u%j z0kVLGf=f?VWJnT3EE!wAsnYP-ZqIor&IKaIv_m42>bq(uV(Y}jz_=>3GCS~wfwrNN zl}poA*@_D8)ZIz0wOU}b7P=QYf!_}T7zifkDl0e#zZ;pQI7H8SQz&Z>-A~YkA+|lG zAdJYR^kC$`cIu|ca{ue~{G&7$RjB1+6}FA(@)IySTEXqUGRS^@VH#*j+?fo~#EJ1} z*G3v*B)Jws(Yl@@;BsV}P#53wF6Y14?=*=qE+=T%`0@-zVb;IW7#?8;R4ng}dXyo{ zwz=*KsG?~iy3N@qEfST);YNtv=af^2$4~pP>Zx*OcSJI>uP0q=|6N+LvAEf85 zx*8po_$0|^A}r?a_e2z+c)(coIA}kDd2R$oCT?P{% z4WbHKK^Ufa{3y_ghEh3+91CepuHCc7RjuXaNxbU(y>8e@)OO@{3^ zA=Xf%mv$8Z!c_4Lj?SDD$|1bYJZ(d?tM7?`)k-U_P_A;w3f_$Bi7;La|Krf;VE>?| zo(qS0R^9DloLZI*J~a>qQRXd6{I#i4G$WLgi6JE>Z!G}J@^p|1vfK|mJa&Hp2Oae; z%*;9Vossq?TM8MXK7lbl&>T3x=LbNHMO?TlxNMH7gdC-ina>OI@EIYLVZZW#q8WJ@ zduT*T`3~S`;z2H#!vLP1QNRLyVW4)na4y$`v>Ad zT5M?@8SR+g=`aayDfh znlZM4y@L1!SUX=iK=OtOWI(1Hji`2Af6j~$WnV!<&*}ez@x|sJ`ghS<8HI*}h`{7c zWO8u`wKeeF!zTpbHG}S{A)c%fgiIuW5xARR3x?(?GTIb2F+Cg`UWw6miLM?%ag~&B zGSikd(KdJJ@Ip91`ozq*fC4PTL{x<8+5q*jEK*lN17+2FxGYSMdIf$VY^jCgeC;`t7 zG68bh?Fml;sO8xL;$?4~QGV}AYz#mOHDL<3MU=((cTt{fiAsd32m2Rn?SUN= zjzY)JiMhT{Uc+J++koU+YzzP&zfSW;R5TPxHbWif`DTq&OD|S*AUkegt72n4zD;O@ zf_B{GN=ufnBL}gfH`=|keFsEl6KQhB`vDp99)tjU94>vR#)aP8*{tgTnlX_Gu3%ek z8`{^{xbl#=S=Y{9NBO>_b@7!Iv}1F|MBS*+&t-2E zP4W^Odr)0ce-KfQRNTS#Sl@?vf$8E$A$fre%sEbZ*}sbdODp1urHi|gUDQvZ&=5NH z8%POjjuv|Vl*47kwJ9`O*z_O9OLH%~HC)U&pEp`paId+>{H)L6?Z!3xH?L)M7ws$m zo7dg9yZSyAm)d~J6o*2)lt9W^eCQxf>kw}4Cbhmn<*^cOB5jw z!yYxfJHg*U9RC}nEc|r@a0s9HW*v$<@IW6ihTD1F9b2z-6cMMd3IH<+-F>d;XyiGB zD0k97I@Mqo^%Ql4^2pI&f=C~-kYaKscUgb}o>iN{%n@=3n%fd(v!heqf!R8M`jJD` 
zXC+g68PiVpKlRFzWbbm7jH9O?OUTup~u)|I5 zF5>|+FMJN51FdKz?CB|ZjoJA0`d$!$ts7ua?kkeN9!C7m(O>i9&rkG_3yE7D#Acs` zLRMxhpg=5_q3I_i<7#Dg1mjsK0ilKa6L?!@<#TR)doi4WQwQ>W zE-(WnfA?sJfj7*l(4uE6fL&hizl)TmWm%{Ro^AzXP1`enZD*Ug4^bmS{-%8`V^zUS z>;Y~F*4u%N2W9|4$7u9%LBWWc68JuWTgNTeeI0npZvQ9$(1Ixv*oprG zV(~a=Be*N=2LPv5M6YxtF!7)&7AUz>57gBsE5yT^RHcM`54zIsG4mZg>GWRX2BwQR zPM<`Yj#IK38~vukaM5Cu#+IPbHrDw2wNeRs1PFV-fM z2g{YyP*lL6r!Hdch3hc)t>=aG7-Xf;d%u#Fp+x*XTyy^mQa2XosGB6@qJ6LQ`@HYi zMH$Dd3s`dN9JzW+puK+F1jxkDN(z=a!-HmruJmReU7R$ zIb4k}xYa;iQqE3_9Mb3iU3LC`bS609)H;)+g!aXW z^SOIoKZgpz`G=&9xceRb&x6hDwD%phhg@PJ#9DIFT?a$I8dKWd%0VMoI!^U}5f_5+ zd+T#Phrc_v!nM795i_L{V+DH zi#4FM9;M8v>pUREH(tarEi31A!pJ0_^dxt(V+ZcfsKQknQRzG1{dSQ! z=BHmO`^kF9oXL}AKPe`*>9E1WCzAj-s%B`tGd6nHT2Xzykh%y;7)S35+LUb90HraM zNia58xn3TG-D>W#ZS=T@rvKT07m1M)5N{Pn=|fFxF1K=0AQn4so}Ni+G6v2C%2oFJ zW{pp6UE=4^6qA`bXA{t=P$Qo1+=J`=iH$bs74Sfx8q6+m@VrLethOC+z8OqpcC6$s z>+C6=8z8w2Ec3Yi6e@Y$-1Ww+?s~XYB_me40KOvUNr3Tw#s<%Zs0m zcO=_9+CNs}e2$nAgWPI#!5=Nbbw~zKQuTv5=WdGRkX*{6&yf%GSX5klN7a`5EwC=O z=1tkqUK+mG#^DTt=3gE=7Lh*?a;3e;O=TbJr&JjWl?ZurG8M|-o2{4vLjtI+CrOml z@u+Wl$dJYL&cQ_UD=^p;${Cg)&N=Q>^&lM@bAZCWBRJ>j*?{CD{|u{UN%2A!;6NDD zL>a~(m))PYQ9x)C<(T4|1Nwe}(U~HP+Pgv0w^RG7Fa=&B;>QAM5T?KT?N8O`&av1v zE#{!H9<3oZ?cbPc&^}+x;KOgZ3_Fi}C+xP!kByNXECHeuj^c8DyzeQvN)9Rad!>uT zB577FHXhSHhF)dfr=A;Mz}9KP%jCWuFVD0EUkG$bD}VEYL2(3&vCkytyW3O!`YI=B zKr%5qu9!vCxbp9!8n(377R)gRsjfYBEPmwOrq(SFuLF3}M-@HB27>ra%0qLPix3&( zYposLg0v;h03lf6*cf*WeLs1v_Eo40npzL%&e1FzyP7V%MOj4cn0Xr$S143@@@a{2 zI&Y^;cb|HZt=(ZITxhp5RZh=?e3v|PMweZgU$@&a1Jju_ulbrv7m&U98d>?CK41G- z$)MGILWsqqUDloEn~Y}{%Zi*|#2$zXn0bvT@)Y6sZxy%C_Mg5Ul{Zi;=3V=V3m73DC9Iw+!!Nu1lBRWi zVnZqPVbr^cx}4JYZm!z5zhuCJLJX7pNCR2Hridsns^|SU#)i)mKk2WORWQP(?qM?a z;|*(?wKbr+OUd@qkl{Au66rPCky6lxiyhU* zV6>1}SsgJNxa~yGw?SoL$m!UB7hP6`t`nj2hyGpUPDyitk6_oko(p&mcDKN70xjStO~biYOD{D!BGS-Qz7k4_E>E)a)<_B6y|+E%Te6e3(4a^! 
z3UM#GfVU>tnKS@T%jj!n=zKT}Nqmgv4Ea;D!VdKBkb`?qbmRC~-!sa#iF*zuVIwuh zqdeWS@!u%%Aa$Suy%0s0pu}tN-)H0or8^Yl`Z(`7^sz#`B0e)V7I!HOof*@TXK}#X z(Nqo)pX>##gD1J?(d8q5-V64F%Z5YB${g$WmVe0l4E(ie1cRQD=74C1 z{2iN*j65wqo8o`vUExnr?7O%e270~+eGLq$O}L17OvFTMvK;{oGF!kE7JUeDlK^Ok zR0tfQiqj{?(j-JSFp|Z8!&%Os+1|GCfGQFR_Ma<1I6to3gJ^Y#w4w=FN@7FF)g*@i z13lOchTN!JpFzFNQ%1ed&My(^IS|Q|gZ-6QQW~f$z>P6#6RxpOViWw}0`P2wXcUHB z!31gi*2;5XOy&TCG*9!|o*wuID%2+oe+_UI3N9aAZws0RFt@kZLz+!~MuR_*31!BWm}7d*i;FX4*p}N;yj^aAg2Pw#8frlj`{p2gy|!cv~@kq=!yRvZ*1fyCPQm&u%GY z>Dj_|cps0O8;z-c2DHOzv(V&DnNpElf&M>CeS199d;EVL=j3#{IMq=UcFs{LM3Qpf zDW@DMJ5p|8Ns?47ce8!Ep^$Y&ZmXoZkKC`zbxEw;MlQoJGt6wW`{(@L`hFje@9&TF zz#g;h^LfADuh;YWd_AAB0cW$*eCTv@?;y&8EBOE+ftetB$$_y+SqyTKJGsP!b~g*r zE2!gqe4E~VZ`bqHCjC>ROaJ_(^7>^TA^Z}IS#}${F%O!QAlqf#jq1#e+3|V(S?7i1 z5$MZ#a1`c!9E)V|hZFsV<#*(DmmYwoz*cE_MXunn$BI2{W`f0F<6Y$#r!&wgKxX5^ zlXJ3Ghgh&;^5|C<7x;r1$@3`F08;{wB6bRLFMpSx&hM^>>y*OiDPTk9di3#wY$*%4 z7Qr~#{$2)}I7@iZdTGB5#X)m~W)}eZ0X&}^z6Uvg7}a_znm;HIDXesN&RS-iI1_0} zIh64X5>~_L*Hf_SCt%LS{XcUqwt)py8}M*o&z2Jc9VuMBV&X_R`qVbg{Vgsh$$A-m z6%AHLA(I+418up4YVE`maOoC5Vi%#=ePusa0{!RN|SPQ|KG=vY})R8+86;O@rL zH~{&2z!a26NHN=SnxlxE|2j_Ha<&4Af6_<>);3z~ZUey6?QB!it3xBs! zn**%}!C2BTf>q>5-P5_?r-0B?x2h`3e&$WBAX$&EWdgQfaaCq0q)11A+OFhU<@QTG zkW`51C07Z_+v*d#sugr-*$9Sq=Gmrn9V)4?Dz{G_x7R@*gM-Bg)V2A;6k&-3&DK&V zEI@8V!!mXW(|PzT9#Y(7QPcp45vVe1qe|sQvd9AJIFe?P;nO)Y#6D$NE}_{GPS)3U zUx%}ZFa^p97@j?P7YX9KVH8CIoV(ydrSV9isene|T-YM+6hXE3l8_{aMul~aRX~2m zA1gHL`hZ~kObYCI6EoS@24?dH004z#HCzFD-&--~1hMY++$|prj4@ zft)QLI(#|DfUz_yCAKhl3p*t9dS4c#7Zgwt#(Gyps_oBU5Fvw4a=wCQP zgQc;&o(13no0FC6F)|u{1R{_mYUJZG(`C!gcuPMvaGO(WKXI!!&-^*vY{@L=S_md% zNeE@vP|y4bV1fdK!zh%ZRnI@C6?v(9sI&ul6oQJ8S{W|qBe(l>2TMa%W@>A-2Drt*w4P3{0Hi`&0f;W5dHHv>@u@Bh5ybKNzSA%Q4{PN=RxM5b>sO0N}7P$~;Hr*SwW9>q=g_Pb{zWG3! 
zL>zcuUYF|&LSRX+m{nEP+9rJgTZW6%O2%=360r45C3^#$ zfAq#`-d4S#%%$H^5P9;CUg~({5ou>&FL?rL%OJ#Ay5wFBH!69y;5<71f#xG-B`Pd7 zQ!apy{1Nd1_38Z+mR2`pTj)B=3j`9W!!D=e(j8x`s3atEdRQ_8)UqkY{wdD%FeHBe zM;(${`$-7tV9Y^xzchTPmYA~TlonkTeE=v+(1z0cK-UEH7{cM2(e{@HZW$K?oL}2H zwht-LThr_sJl|Hh&ySLCvJv5K!)J6$uKXzc5liURz>};3_^3$|m5@aI*?Qlc`DRjH z(F`HT`m49^iX%e$EA>Y|iOjD8pCa~idp~ekOfHlhRaQ8xl_AHKNhp#=Fgg=mX?Zzf z^m5Wczq@0->gtO)sgjV{L%B7%`S)zwKRS);E6J7FNp0TNMe)rAP=hzd4)#QKS^k=H zryVn3@yDdbJ-x1yh!OIR*w%?Mk3-LzCrij12XuZJh4UBZr)u52hUI5~Jt*yw^*(VL zav?qaw*Sz%{aGaoRFDs7<}=UYQ|i~DWw z>CJH@O?(#Ro!$y#6Q7XFY%IDU)}irYT^91r6W{Z#+{}q2q3Qvf>wlLn;h=Y*j7OeI zqvVu`^G)-k1<*L;QYyZ2&17HS6gcN|fy(5|rPW&e|NiY*f{_D--=hugcojM!UV-&z z<9L3}x$nA6(pNVb%8&}L0kpc?Z9t;0-3|5!70%gn7?B)VWCu)`&i~ICuAb7rOT~nI ziqx~vI9vfE4T2?*%?075YU7QYl@GqlWiC};57inh)NM2TIII%i>U}MEB473kLkQ~e z0xgVAi{UT!TE0C8%)$1f#LxmjuEs@Hk&XWnv1y=J@&3Rsz1`dgn^7P~uSP^Z^QNTb za-{QihVuvSgIu^Nh(i7>V75le{cE9N^>_I+MS*IWj@h5P^JZn*4qe(vU#?)eM;~bN;Rb(R>7!cj-Q;hpzxpN^b@aKdYox8Xt z)OAq7d6^bSeThTYC(A*Ph6l#q!9=zT`kqi7$yy>W#U1?6S7f;0dcIAD=#wRe-FVpM zV^uzf4ZH}vYcr*L|6617zA?r6T7A!+9a}$MQp^JwBUgt)zS$t2X+X9_y{xK8+V%K{ z(1mA-EC$(O1nbd~SEkNjd=6~6A@1Yg63eW=WI zAL=~?4%tGs5&t5fZSq)uc1}G~nF5TpC{zBFQoI`UdUvVm?4C-Df-*2@6FK^81?e9cZyTwzZ%y$t{o<~rEM$jN{Huuw!r-2%is1|`VpU4v zM>-gU)5|z8W92bfzhZS<@7#Fwci##oUwf>k$Orv=uQpSYqO!z()~jiZrkcfVni)Bt zhP}q9&X#Dc(>7@HQ!9>QG_ye`@tT90e6SJk_WZKIRBP-W8ND3r6)-vKwSO{_n31q7A{_vT{W?+1pmM zm_#{(3qyS{IVg^IO+z+2*8;1AS^sw>+V$+j)r9*eY{Ec;iaou)lxDfnFON&pej?;# z)w8CD$y)$+a8bIs2KcprMu4-nEh_9$KJLw)y-vWt#YUkg?A|-EGxT6N>0wTvLfL#G z%j1B0Uru&XodinfTa%XHikZYn3Ja8s=E5$ObILNxDrSy8D0b$s62U^_YfCcdRsJdH z`Zx*B3ZpkZ$5{%0ta8;d`oAmaZU=ATe-+`T}E#sw&a6M840jkqH;^0UDOM zll%F_4IzBXMF6tYSJbVsD-T1wQnQlryRlKOFe9OpWeHm8z?*s&t~CjtEu&lHik~@b zML^Qgp=c-$d|qa3bpB-zWNg<`Uqge=F%ry=bC({*--+q;zm(gP!4B}v@2#oJom;vy z!^-d}0lU5YfxMvWX1(_ff6V7sKA?uBDl|wL{~B= zXN#Y$3*^HZcs0%i%(Y1eF;&7^*0iFibdonm-^^;CJr;Vzg3p!9@QoD$N^IlaUENWq49>*HDW8NSOTygF}Ed0T8VCr(H%4 
zg@^Up&~ud~0+wKg5;{&mloh6^#Z?>+dU#3a(Kru^xlY!8tR9`H0cT~k3Wei5&!o)!)aipAw=J?*Nv3iOcpYUoh*-T@ z{q_EKl;Pryj~`dMQ65qFYEHb2+l<&=ejxI|-%sxuX-x6ycZOfht~+}zYw`<9!GSz zXtL9iTl2qwBbHU5PBwDcF~2!_-AP@oI=$jc&(E$75B=~RHm)XY7r&wXRhDMwr{&*a z0i^(tX^FbGyxxxNezh{#b7V6$QLsQ<8jo5b)6fZsZHq$tmzFgT31)&O0npWGMP`5) zSkVfl5ox!?z0IrldWE#JY@ zCec+};#+VoCzqI*=SFJilgA$QRR-~eFA_A?~p z@OxX+6pROCWO;?zm9*ou9z3^jP`v2GgXD>W15G-QJ><9UO{tw zq}V&AyMAhk*lE8m%VL*%r)t`zOK?SaPDv7D{(H*rpzoVGsha!!#=zGc32Xxn=)E-* z3lUKf7zl*%TuoQi_l38%18FxXoGvf)qi=lPme!8gK@z9;%@501>2j-#*RdUWcuokeOQM;eR&>f}(k6=2f@4p^$64@z6 ziuafy2Zmup&pU#a-y-ESnCg}X$M60QuzuIVyt0lHqnKUB%_V0xjr{v)2@_=Q*9+dF zaEeYEi^hJ@{&4pr@g_2bHOS}mG#+-?>iQRLx8HZwSSd|?%;iNN5Q2iY0fwht3gQ6i z?{3}x2t6W_*AcU>G^#Ikho+tH;_r0L7uSA+>W6a#Y|#y$S+bhkw~>D1U0LtMvbRD- zBV4;7;zk!OUEBV5n8*^qFVYcE3owhP$c3vQm1b12?FPMIGR%0V1TdwC*{CVJ}Hr9e(iw znIK(^@7lAluU~HcH+@yl3P!9KXV?Ij5_8#LK=%~Vur(Mwi3+a}TN9lNgEC<$pf@ox zdf3`1U6mpoT&h&f0Uie4aYs~dY3pHu#3tA z3Wg*wVhLsZ%?b1x#CyO@%n){mh>QJ%VNEgY<6n!m zB=%GiU*9s?=m_qBY7UHlaiG5Oe5)~_PmmKCNuX`+*6o<;+zmDX8kHh}Gw^0}AvnV> zp;n_l?BD=IJL6VacIMM~Siv~sH0vBissS3dIG=SdR02eF=rXB9j zP6`yzO9YGatvd7gfLih1xC{}qdPPnln-I!hoZ)_11b#5or^Q*LKZ_V#X8?4>L1ym6+R)C0V|)JJ$-Z+CGbZZ~wPVPyiaWdO-**mhnhwO1;DYlM5N6I!x4jil5hEOEJX#G~uM zX)asvT);)KfU^Q}XH$KG2p^F?ha^ILsdHOf5sS_iL&OiT#>msI^=bQYQa@#I;}9=0 zGjb63&RG5Iqgl^|;L;D$5YbEna{^b0yc*XPSB2u!DGn4GtGZDvN2Zc5ilM?T|Hz-I z;0OR!sE3#~3hsRp?CVGGJt8kavGfo{8>y+EV$$sQ!sxpWSey0 zzkmNUELU>f$wX8_`04|yXw3gl% zuF&I)BgTFVp6O&M&SAK-koAi`Hs08{=E+T?c}&tPsh=ix+kBMEr1}b`Z73F@dSP39 z+r^;J|JHzT8u$~~T8p6RuQDsdk3nX>JJ0h>T?j=d3|BJTZ+M2D`ewZvW3A}O7AHjj z(8Vy+NWV-}y$G~Lr^7kJCbn$|bubPQsB51aoD>Csb&?X1)DVpVOjZ2Aw@n$C17nv{ zgYO1eXO>Yx*>svtg^!Bk3dMKEjKcMuDgWLw1v-uf9{R=Q`W>w=V@_32s>)7pzJLa2 z$U&gRTTfARK~Dr3`^PmM{%q@M_hIL&>9OI@CEpA>+`lo-+!#?H%M@>}7Z=OPM+~BT z$X-CkF^gFo+r?W*F4zIWPpDEDcLM*z?AV|6UxoWvv2L#k3kdewJMcx8Mu?JS6nzA} z8Nc^$pK=|?CIRhrQ^x`!nuv0wQD17^csh-kA1rhZm_Jy#DhQ;kkY#CYml`YL{k}xR zjITpKzv%D$NmbDQR&=2tAkaWmDoRPHH+v<{IHNxhzQqF|6FC?Bw8R~-D|vmLSUX#i 
z72+;bP!zQ=MS&0a5`C1JYH|T-u>a2$Q-u*d>mY{KQ#@vWFr|B2IN0du#9oH!l*_mo zszuagFFfT z=b=x?&`AfNBIjMT;=>p@=MZz;a|%5t|^XphZ`h5!*t1%5VWf;ow~JyHLYzTp-Y@VK9=_Au-Zszw_Y zLA9&de?p%)%_kE4Bs6#uOm zQzcDqaym<|u&X#=!s%jMs34D*#7go5lrz+)ph>DhV24v5T1|~ad z&Vesg6Qmr%g)a+NYCt=|@RK1WOhEU#BIs#@HyrrSVpVg_jNr z=43k?KaAA(OxsOYCTm2RDIv-DY8G#$$ZBhko)Oh@Irt8Yj0yDUTv3>7I-rFEBJ`CS zHa$ae$YO=falQZoZCLZcY80F>&$8r~b@g`TJM z6V4`?R~oOIusQ(qdj=nH75=`nhZt1r-q`*AhrynNG~{TV zM-}(`t|aYyx4fy*d+IJ~dC{1Yg~k3&^gH&a{jX-0LB_pcadTYK=dw1%E=zR&SvDe~ zUMcrh1$qUrZsCtm9Xr{+&)P1{`j%tR?oyJxx!x1p5tWNLl?~KCKlq(1%NMKK7`06J z#=5qV1}^p71W}hS=2vDTY-7u7=82!;|9~HF#thfc9B?uz$e38OlxTD%IZltNyOaV7*%C zHlDL^jGNqe(myk3S{-I8w?;d@m^g4FFa`*L0Xvm#ZOKF|Y}^~*wCVgF8{M`^o&$%2 zSnR1(jjX7X)4s?8#Y>>tAp6LZt21w@5Vc6!K~hLqhowXQ4{y}M^{};sEpag^8ovC5 zEm&Y!ZZ=n8qemlg=7S!#pkO6)fDhK8+Of`Ej**n~ewegPeQi5#GZOF9$x)Y>PTe(JZX=g6Nx5Jy2qm@&db%aJM%o4I;OPKNn$V#rc-rF{!JXi zTPLFKqz(U}^$(Y33CyZy0bqm1e@zKnp8c2Ic=Jk1qG&;)u&szTxj{;ITe)kLNTfeD zrZzxvPVr)W5YzFQ%1fzFneg8A+HX@V&ca3<{JRj=cge0J-8RM7%ic_>szD@q@fU#|#dlC*C#$gQN0z?4SZep(U zY1hz63j({SpIRaQNSw@h8z+KXbX8QwW^_|K#O$YET4vO}Fn62tH)zHN?D$kz^K7}Y zAP`*>7V_QNF*lkK3W_GFuvwha?j9B+d->MJutr0+5rjLtNbOoRY@RPQqFoMk8`&$} zdtC<@>7~T&8@3QyaThk((Tj!vtZu-_>y*3EFP8MkpZ!TZYQ~golmg?W=+SI7KQ~fh zD9uI^CKwl38PiPqqfgf+px?{fJ@LR`H-tsMlckIx9&3xF-d+6EbXet%p~EP0Eq!pw z!bQG}v$S;nAb!!NR_Jg({=YRouj8d;#it3>Y}U38X3A29tpjI!DRSY|K>zUaFxToh zqYoK&#k_|PW~aT zO4_|I_h??g#`cU%JgzS$&ml+mG5Epe)z8NG<gg$ZltC6HDNb_|wb{mGFg4_5DUHn@1Q(U( zb!S56TBQFb-a3v*2Q3KtTTMXSM2HyJOa(6nbL0gV>>HDY4k9C};fq*a66UU+AR|sS zzRmfA;|ho*zj}!>8bld*%bZSX^(_zT&1IC#QpSZs>y`XlEx%=k`vZ5=V9G)9sSb^^0nnpdFeAR6(0Gj zMkcdER>Nh>b=YBP%?$q5sGX40tHhJ%H@>+P`z_EIh})T_Z0Te@%Z7)@+{;~Bz4(P8 z@KbRLTKDBvzdVspZY&g#m&l6q?cV%ru0NkubSSbl;#4)7lhvk8IH@aL(@CHjF>A6O z{GtBnxq%;pKknkt1hDNuUBd9UJDGZq{U9E3fHo?&(9UxN-$E?X%j{gvAu48g4ZB|C4Kg7!PxBDUUBS{@ z8X)aDnSADC3YVwc&r4P@_!NQOk$PqOs}gY-_uD71wcN{jA{dAXjok4JgdcI>6Ihox zp^ZLdZKi3pISP{qr^*Aq6(RlX8T0p(O(~H{R-m;5QLm$t076V{JHYVgrV)hGLB~Lz 
zSPi)?`lfUnuM(Zo-|qds{(O{$B4DnX{6vmOVvcQ{-1*|_L}|o%wqr3ihkU~w>v!!(;OkZ_j#$Y{rxO0t$iY|bEuohCHIFfUc6==QCDxY`R$4(?d;Di!W62fpmPE; zu-qKSXER0skIO7V8 zL$ouXG#3YfUOzp&$|?Vyp630QiEB2e4(tH~lVt4=mUg=H&+Xqn%GHH4TwCZyhKzIzkV#!m@MAj zXgo(*r0`Yqz6u`mXo)2fEqm*&A9N)*5TeiS&6UY1bpdGGDyL-*6;Z}bv^0O(srq*8 zl`IGwJZGJO!(Z#)OApy^Sx~(<28ZPVe1aHEU<1dPLKD;a0*9vxR>fv8xd@wRbxZrh`30m z5wO$rDbpdL>p|iQb@CQAY5f`udTT_=I4izT>+0LG=b=+o<5&f7D1R&~A3*bKffJOhlO>+CkX8>O;hS?PIna6r^<=rn*BlRj|w98+b5{!P>`(+RSWcr*yPM{_JI%@ zD3e6!*-=}ovW^vXO*?WorSHKwzQVm~AQ!vQFwl6BuYtFF{xwIae;K1SI<&hNcbGw% zz7kRN7ARwh?=ZcId>A_=;&_hU&+-_;Yu{G6IfRV!l4H6s+uygT4$lLmiWzn?Dlr4b zkHRko6uFTRih~$|G)#dUBFDF$D>_-BdoZU+WL~C=ANcafVsGWp=&%5a92&Y7_T|J( z%U&yr6I;My3&y^}`q#`&rv9|Z`-?>kbXvH_c(oAEDL&(bfjmSch zrm(W0fZ>mPed_+UvqksXuu4rSzxYb-7T6+AxU9(Z&twt9CrV{xnQ^p#6>m>QRH*Qa z@tO80W(FAVwgj`*pXw9GgXx+oVEqZnGtBX-Rx5N%T32pN&NAHJYxR)HXVa_MX^L84 z7#k=W!DTOhL zKk7AFm#Vg$>$(+XH(f0Wh;{Bk*>jh1VR+lG)C^1(iJgcImDiqSk}2-Y`adSqAyu~R zzjwE+cT3zxVz--7wFw`kVd_7=SYb)XV;e)Ut9XC^NoRff8(`UY7%CKA28 z$$paf+mP4~^>lH@qeIAIvkA9wC{4iNTkFW((=XI#spYhqP09FN8o>Wlj&uh}JdskI z1d+X@5?2$qgSnqJWNvn2gTilHgT*zVhJuqWxLmX`%Mg;CyY8guz%U7=mb zgl`H`;j=(PvCdvg`}_=*^}8iwZ2TV5XV{Duy_V*6t+mS06`_M{q)wu|urQi@)5kLnTY%haJZm9#ZZ8`?^JaMvx>zb^aANtar*B} zL>{1jn>1FWQ*=Ie6V*%orh3<8na`#%~C)%>60Ib!~7(wDJl`)!HMvR|kV+aj$mJelba zQonhn%>D5Bu{}4!Zx+~!!T_`e3_;GEefoz4Rw7J`p+_G7)Q>S}*42Q&EGPHjka^?? 
zNwME}xA6GlYbIW0JC}+)?9nx$NtopyIbYM>Mf05mYqoWWKZus#N0KK2t z8FG=`5egMVs{GoB^5@4>5J;LC6mZ7gTXM%BVFSe)fR6q{u&DcxZ1s$xl79O!|D<_G z+R@Da){L9nz1i4_c>?1}^5tvOZFDB^aGe;wBfi-2e@`3mCKHLJ2&J&?y+o)D(!O-& zEYRyvhsR1n?d9-{Azu;&n{3RaAlPnITcFOHjE@MoR77QZ-QiVaJD4EJoW;Ea`?LVa z;+<8U0@rRT%D;660N;Q!ihCw@ULoeIly#}d<>4F}y%9n}Sy9nk73FId4D*w7VfBZmF=>MM03ISJr5IZq9jSiDso5eVzi(L^OYlZN*S z_e=gq^ZTwcKA_;i^~pH+xF!x@Z&j%F;tOo2GpkBqBr8krZP}N=12^@#eh(!>jd~`w@P|HAtG<%;O;k!pk3%h$PzsYh&=6o-5xY+Tm>o)$$G}F+8H3j= zoy98+|E)2kk(?wVfWe6#3t4P?eJ`wdehQke_M%j3JlFL8+~nYD;q0A=9|j6@&>VK$4O>;*|%VvaJNft3SDx5RcaIZ<~aTMLamOybpU695TPb#L6 zB)&8Xe&tdhp&#iS`9NU->eslOkdq!gRdQ>EgL_6}d*OJf%l2RYTjPKvNrVE`Y5eXU zIhg^);`k$4BVgvTX@&f35y+Cjg|?DSRui|LkkwFtngvVvR7*gPtvs(ml2O_%mlIZJ zdEk45k9ZTdmP+z2H%m@led-Mvc#94JN3727`dR+69E*Kei_4yaXgy{4-$!loqb>Rx zyEY|(7kmsQ1En_+k0d6fU-FIX2+{?;A~sXcxf@voI#&m z6vs2DZHz}gZD&IncwpVN=Z5dp)TUIghKzLA_4fog43{(^AtyS}F^by)FM zkzKqlQez`*6(FEapqdyzM4KlM&PJU+fLXyJ%~xiOA`EC685gtaf3TPeEw*2IyA+sU zt&>BEt=E3H@0m8Odb?uH$EJ_K{&87AKJKA|I{2YK=DK@mF7$ctwhfFobKiP6mj@OQ zv<=jHiY4M~4CQf`j|qvtb^=y-%kh(45`+ zlzfSkNF*VOg0oK!m|y_(({N~c-lwRcT8*Yn4D3wG3=on(pi8tevuzdGybWjJfw~P( zND{;>@qwmwrrE3!+JOzNgSEsrzv&%7{&n}K*dNyJ4i<80q^cZ;9rrIQp6gwzhYMoh z;{VocTMQ=T*#2<&sWfHd1h7AFkD(8g6wJMJj_1xX+$wc`O+l?(uGN-M`sOP(?ERH7 z?B#wMnjV`RCbWQ%v`amfj(Ors%ljWZcaLN(Y-f^#g;PQ`0rj-XY@CbOYH4nfwcz5V zap#?29*1dPFAdIMsm^2>iT5uJpDRrs6E!Cvy)?e`jEP9OX!StKbDON*p^}*GvdPk= zYDI<%Tpt5BT#p(==2UI)x!Tim#bejYTGjs2u(pXTitv{-c4INXOU?1>(f;MN%RcrQ zMjuS4#cm8(c5BApnMfQCzBdM|FfqM!z#h(XS8r_N*@ z#h~NF(Oyc5Q+bLSMFDs$&$ZB`vFL?&=_%T#_=x*Y+g5{OW88~E4FY!j>$;ns!pCKG zGZ5Ff?Rrh}3(Sss$6TAkB?{u1-v8EoEpIS}qq~g+LPQ*b9xB73*FK~kNkOJX5VPK=EYD1qVQ(poG@Vj|y?UX56M4qoV z(Axla0x1b)#y=h~EbkQopqPR=!a^7R&B%29<_ZI#(0i@GU9E|PHjUJ`%te8%PAsr~ zkv@aPQxw;d;^B9z&LEcR=bj7G;f6Cmf2Znifm$q1SU5P1Oqq;?A%ghaS4f4a2*`2- zhBiHi05#K71tw4!gw;GRh2K*oZCV-v)S5SeX>@bt0kFwiAr-a+l16B8)YqAJUD^id z!?fc)^M-q>_;2Dws}84Z8qfwbI5mg!-8)vy;*gU_1(Xvpkdh0EjF)vRIW|NSQ0sH# 
z@TL0$;m*VFkXO?YMr?^}a}84nFbprAlKMoJjmtx1f3(Y%>2l;R>-$_r<&jp3xrXw0 zo9sU`JJx>z2{lU^(5Y^|Z=C2PVnjLxA#Y|X^7|`V4^_P&!j)FQF9Jh-#YF=w?4g$4 zm5k6#AO1Yf(0hT0UaaSvNA5gqAM{X5OD|X5h-R)#sx8cwAzOWU|h_OLxM$ zYft4`5C-c*&kANgkz&y5_h{CoPI{Rd&Uz|$Unex<0#rG$Fp%>Gj7%Ldk{0QP;vN|| zQ@(}W2nl>w7xldSpNdUtc9G$50l5m*VnyvG>k!q&*}Q9_rk#6>3dOh3;*4FFE-qIy zl{2J%ms!1PYkbr=zz-Z6SBd3D z?^t3qdm;MQ?+62<{{L7h9&)oy@@L#;YYH-SkwHut~b7>=$e{KOxx8} z#@${jAvTZwJ`-`B)@rb?>(-o^*RBIpzaip^*Dl@WLIbzhW_>Rmm6)V$8ynRM4?Cx! zJB|ig*HG)5$Pnhn1u-a5J~FepVrrrZ<~nd)O&O)^qQ_a za&A_5N_SWA43)s5z5{TMFt0!w|LC7S(y1W0)>W>XW9FFGf#gWhj{%4DdR4ObzXK$% z%@Jh5Z_=0cDP1Li11eoh_=1C8lfIrfm(w-%cy)T@F^GM2ef7httx#4bs#hBC`@i4i ze5Z>}IP3ZvppUG^w`psA|G)73+MVHGUgKEs;a{nTS$F3C5p)|5gNpiFJ%7@r^#*cc zAjX=E(^#f5)R3QHH$l4|({L-k&q*BH*K`rzp^+ba=N{lRs7!{kMAVLctFu;Ybi1{0 zQp1Uyfpg)wdKCOp(S}}fkw}QIf{3vmj%|HxpAxkVz7#iUrOx&henqChy}=rn!t{#= z&H*7==P0u!Yj2tX-A{;+$|vilU42)+=OpU%W(Smbt0|Y3Yx2qK+EdQaTXr*BW)f8# zP)OqRVq{UK`8T^@4WNYsTSV=}TLJK8_=yJY0WYBSGmBs(#1X36R(Get{fb8~rb2G% zr|W;v3C#5l;w+9TL~!NDqql4#H!@g2F5RmUzQ$#QxFI^IWOk_)IO*gmg9I1y6#m6> zA|A1gr1nt*k8dQ>Du0UddiZv3=kG?54Qqcilr+sLPb5Y~S}F>bLGo=A*Xeg{3u)%0^apk_^w`u-nOdd@v zqM`>VzLP4X@CVk0ebOu>BK{mSN}17{qr}P0_|_Ur#hx#&s0P&9EfyDwqUHut6y}56 zA~}jQm!|Lh)B~34`x%aez^P^&@!5!gf48TkCOo)l+-)Atq31*-y;QnTcXx;%C6k7- zlIqu{2{jExMlEy@fC*wDU}(}~*+loev@0-BxR}3!{Oj`}ORaUudYevW7ujy%x(%Nc zLIlo;2x~7ksKvqvCO1i!OI!eiFQ-#bSJ&PJ=CCU5LQ~_^V=VD=h93z;rDk*zx17+P z-$do9_|V@)VT8gCR&Fn0(Q-rpO!)xsvpVyX(r$b3({gQf#u-EJ3mVsF2R=+lW{MWd zy%MY7d}Jgn3dG#J<(1lE!NPqC_sWW z(0C+E>9^_FJoB3`ENuc+_QHCNnbC`)Wz*bmIXCUeND8UD18OpBuvh$g(m4W?Ph|2kKFheam2>FPgdMDnT!`KaWN+L3BfX96*ATq5v#%xyYOi|dfU#a#shV-i|KyE#H2AQiE`aV zfW!H15|&qnBM%>X^(aNtA(;EYO0uwk&R~?&=9_%-*;VGSjgOpN#s`Bkkt6U8t~p(7W{<_?B>iUu?j)ogb?@b^ViYH zk{pUv@DpmU8uLjXYl)v87;iK>8hMgK@Iul`XtTEh7qc;hkGr3oV{0q^^iNd1b*0aNrqfwAShL2VzcBLzV`XFXnihDDN zw`A6cR-`P|yu644huY=(p+BrF2;?P{_U`E!oQ|c7wMLS&Jy*QovG1RwrzOu+FjH#8N_=U z6^g2Q0-rlxlKn;R4DC9PxYP0 z@@P7in@wpwlyfG#HsH>Fvv>R88~Wqs-^QiM?`h{L 
zjM#~H*l4Dx7d7kYWL|V3A;Ng=0^4&kYukp{Gi~p92i#5l05k3*-PS_*_3Ed??=u#w zxX!89?Kz>o;S6pKPazbg!j5WINC2PXUkv)w@y?mD#&MrlRt~nzB%C=jBLlH@*2Nur z>7UzN{y`)#ONmg8^Uv%04I0PXHfKqfPe4yxyVXCNhHse<@W!}mTv1kl!2{&$>61x5 zx3ewv9OC@}nHal#)W!87YxylWY3M*s>BcObeCGW0ALeHVbEPar>8eBu2!R^wV<-Qh zg3Zdh!FZT(YHncPV&Sw3i2SS~P23^&xzK}(Lp3Ps_9Ao*Ofl-qMy=P^7rE=7s-!I{ zQ0sVCUO#{Q4^|ecFrEwUm~8a!!<%pzn?s*ry>q(|FM{ma58_H}oxX%>$uUC4qr>XH zrBAQ=bVq4rQVhh1xL4$Easqx!ZEf<@76a}i4dsE0puHkekRgROee6#S`@E-xKNB`CfjCR z%=@>C=jvg76mE9ulzx%>uzwQhWyfVF00Gm+>$AwhNpPIp;2F*pRM|(dgQdULR904_ zbm#W@Ch{xp=o+1}y?)uKMhBx;B}(b{Zdol&;9aOWNzVHK>vUl!A4 zN8d|`gGSIBTzbiDR4{B^O*JL=ocN#-qs378^yH%ge|sW1tNRbBP75WV0gEsCOuf@M zNr`8TNRD*gwKjnr-(ujxhhMIEm`zgDFPr6PG`qDMYXKz2z;Mfc@55iAK#>Sr3;K$qU^pG;rYL1bA0SIz+GRbE^(Vw1SiY-8pBYwF$O znQs61@$S32-5qrxDy(jGtB7>Coo1`sM?x$Kg)B*ugq-HIb)=*%2_=nBC5)D_s)*LEH2=q1xZVwC&uJ3h=^JS?#k>< zhVy27zx|Hn6_*Goa&8?L9Tcmte3c*8+T$USUfnaU8bXW~!@)R%qVYa9F>3LMY4-ZB z>7VMqfyD0RVr-!3!H6PZr8u(;7T?0W&h^fiElI%=LMM=SamCtm`3EURvlfEhkR+8b=&NrHg#$0a?pdugIoIC z<BocA7`>tRY9BK(r%Y`oe2Ir9 zUkb*=QrFq9xksM4cE$sD5gNyOBsKNZF6a3eAG55vK3nZPJX_GPjGz5x*2>0DmgBZz zD%%?)ZaElGYxmp);aVQj|N;N)Mg-ll;fWl}G28_9_3 z8Wig!$qg^?YFE2yTBpN8ff)m|(e)kVS9Mx zRF8}UtRG;UZ_M$Ew8N!GHqYkaJYYX#EgD}&s*VJgUq70vdMT|PSA{c@?1su1noCov zLPZ{#-|@!w7$vog-nNI-C|LI1$_qI0)Z?O(Vw$+gbUul|gWzn!ld+Dn{hj5JX6>QV zUaY?8BJoE$AH6IXlfG-$Jc`-j?U&WxGk~6>y~l=zwzM07W|!QIyG5H~ z)b$-I|IdlAeHQuMD__cHKg3SJWP2rigy?g}P2^;lMB$yV$=)dZA?nk3}vumbh)D{Ae}w zEe69Gp!;3HBQaSzjXl(eQAlOfOQWu*@ru-ITY~Z{Rbq+%Xxw4^V)_upZR@|Ku&v@8 z+2!D$Cei+d)TsM=<4x(`LH7-ep`2+m0o@M5giHrD>UJ>y?@ByuAE?{GS!rCb46u=J z;XxQe2kuGSD*puqj5#qNvwM+vF1yYA<9tmzYwC(eI~0wR5nzs{y#Kv?&R*b3AQ(ZI zTey3H%;V0s;L@eCre9U{yR7+hi>%XC@h%+T~_OwfZsrXzc0V{f17O0jw*4uAn8pZQ-l5+{d3F|L+? 
z%JlaMPMH&s4(P*np+SAvIw7NdfmEfsukii;(b(;J0-5Vl>VyXev^D!^9Px->e@@RV zwoi|OB`HIc#u)>>2Jn0^M2|0f*r|AYf;PHhL6v%)2Drq z5Jku$L}%E>c?0GrAqPq4ocsTY&lP4(gt+Y8e+xC~pxuk`TR?u3M@$xaUBrx|tLe&JIIjTXx$5C!ENqk^g6>vk=>+W|Y|9S1YW2|`(nK;~9RvXD#S07n24)f* z0uT_f{ebS;ld%#uv=ng;B6mNI*HXt0|&5WCE89 zc<4p4m}oax_ViPcutr6qLwZrL^GOUC0dava;KrwA`^bg!Vf zE5O_oj063*;=xrKPF^p;d9l)^ojyOV*9IZuk3hJ##r&PGr0=xAZ^J@B$YHp|iD2~V z>Xzn{XE_!doG&O@DQZ9bWB{bfjMGm|>8ebT=@>(p5!dw@&WZMkIN%Z>l*vuLhXqNjL__q_DA{Y8 z%;dXVtN^=QS3MgkjrKG0gZYAr+Ni6+@~jSSBaH4HU& z_VOT|)MHJoDkKF^M>Fnh&5i9MfHUwWu93xhs=@9ZFQENIbb5sKmLEN+{}V^c-b{)d|XILJp-&`$eE z!`MRw81T}@Exa$8j$DZr4*a*`4#n-BOPb3eVxySN9U>l~<>-%OJK5fHg$h{E#vHgq zV&{?d4TUCK3C%tM%%H7_m}=){i<+6AqQJ23#cf?iA_?kdtpbPU&U^J`H>;IO7H*a6 z5J`Yu=B2Nqh&2L&UW1}F?-tmrE&=r=M=H7?*c&o>DP^ji^lEuN$|-4P{`EI<7`A}n zSk`<#9H-wlzm?MpGr?Aa+gCg-n_*S8ZJ$B5a!KEWol#e_mrx27Y2w`TxCvEWD=i_a z<_QuEDcIy8MLzLzM=5W=k`zdC61R;$rlZo}Z6dp#YLH_`8 z3`A|;d0cdvnGB&ZOwbz_MIf#T4A~)|glbDG2y*=2)(;VPbA{-)JuLy#2sW6`Bo)-A9?V3fW zX(U>ZjW-(hqe$n>W58%}_Ak+%#Fk}i(81TH3ky~_#!XVsZoIA(QUNRtuqQmr=Ao$% z*MF(GyWS$0q@2Vu+Ce^XJFGHD#(?7VhvBLL>%AhfmDxe-`;`Z8)>1hOBh1_1aROvd zRVRX{B%yg;hW_az!u1>VeT^5{SO zS4394$Zn6cmzcw&87-ta~{3Ab}G18ixnA+Cf?L&A~xx-T@fv z28C0`xU|qMu@gvp8*0d%*DWVO#^=!=mDfS6SaxLxctD(+I>du`4?sF%o;OKfz0t1k zY>=zeBT#>8R)f){Ofh@qVgD@lbKRZ08Y_H=F)+F*cyV)gC`|_^2N}6Xbe`iPb7a)7x}LiBXILE~P)x@sP~_(^gljqnl(Q4(?2D3AYf&6tTFbTT+| zX^Z(5j1tB38kPTz?b_k@kaCosgf;0&pLH;+;Ex{}61CtM(MdY*bvXD#`uEWTdKUz7 zUFn6Atfr{)y9&=ACsx6w6(44_@!u1V|Hqg;Iq)%>|r{Bn2o&(($iKWi9`fO?y$i-n7&i`ayI%OJZSSDx$x+2Y;>{!|I+L5 zv#8@6ji*ik^JJ1Tw7<$h^_+4}#S@FfTWn>K9rcIm&Mh9xMNvn69$VJiZ;p>f+klT7 zXG}i5m!QiaFcd;}7n%gi>mp>TqL`Mz(1%9U|3sNC)S!VjFTePKv$P%F(5{LHph@jR z!N30(3_&E)2cxh^{)+vdqlP^p*VuU;9UX*@T_ihQxFZP=(4n;?LBgVsgz_QwA4!EA z;orY4LO}aFeJpV2d1`raKfty3sh0VF%G_`&f=i4Z7fxYSfpbqw>EQBte>v$YS?DfE5TryB9fb zk(rX|^Mf-vMtyFvj#eCz0jyW<&N!~w@C-HLMno|vu=tR4Scs8Ih|ywP&_|zzj%ZoG z`Z4hh*PVX?5dDVW{FS*+<>X>VWhHFZrn_p2Cb}wflo+LiBwwlo1j}cEapTDWm 
zS?p=Jbu)1yq!C#d=XSLDYFfkOi_@+<)%b0+4O;fiqrnCF2kAV@Y4B9l*iJVS6W8>? z4%fr=V3wM;A}){MEgA8H>)fr@809Fo`P2{Zm2fofh6io1f3JY|QGgeaXkG&q-m;=i z8Z~1XDitW&(JZTo7VE*iH`YC9G1E0&GUe9QkB8Og#D4pqV0$e}dgD);?soWu6V5!P=w79?R6_&44xXn2H8%r_cuuzW(XAKIgUX!2m=Fe8 zBHLIka35v?4;onr58WQA*jfdxjmnp-JI|j7&ziK*o+t4+vySXPY*?W(8_^TI2FBlH zR;Qw^iuo1@*z}AFSl^-h?%Y#I(BOU*qaYuW5I2Ku+TH&{S)m^h@YpZSybTIp_Viq@^mW+0;L{!zSOZc88y; zo+>mmt(Qgcn2ekw=$BMA#Oh&;;o^m(BO7uRmX;6e#tGjtls8u-$Mcf3u;r9}l2@Y- zm?HlIz%KygIdC6rr?BUJdhMlzRz=!3c2y$#D}+_5P`rLDQLc7-3;%a9 zfed33_%ypIKM6Y0+u(Dh;R=Phmi2P(?Di zP6&^lZ5`>a8dXbQ08nqgLY}##J{du}rKMw9PYc5JVoL_D$-0X|)kxELOBowqBG6eU zApALUq^F$Z7T0q;r(F58 zMnoxia=9XDXZwjD-DV@rU4&RX2EpPS8-Wg$EO6%U0%R1)5W$(<&m+D$iMI4J;50nN3;}Ml{qLc7v$U3Z7b+njb zS&?>-ofGmc6v8a#diSmgjs?f2*0LsQjQVfKVf5^Jq(XvpZNB82NXGHLiR~?|wwy+l zfut^n2hJRI( zgh3PEB;;nDeHlHX=!F$@IPOH31Yd=5Iu{mlm-;ZFItDr=dZbl?T9=XU8cigkhq}O zx{ei=f-?rDqZDJ*>BFY(o!4vsTOm=Aw$H+;Fo}xwou`d+L}M-yNg|J4kl|l%B>NqF z&#N>uy>#f-Axr*sd;21DKUf%B2;UTkd+lXVu?tKfBy#fBeSCiP%S{$@+{LU*eShD* z5_d=0HO5>mk8~T!KZCy4J@sumqwl@fxxbKK)>wphp(gOe;SxnJI=~!CE&SOY5PAsr z#cu|xk%en=GB+K;2gn1ICD@&yfg3Pl@g_Oy$)a_JAPn zTiib=gCAQ|p01%gw?y$oE0Pw~6)$Nz+Zs1CX@2~EbL&udXCNa8Bq~cbQ&w!|`b+Tr zw8kd_$^RCpi&u*R0}>h!mXsF$JXktN%GiQQX)(_mv#%IU3VHFzks&h}qjZfJ`5s;5 z*Fe%yoDdv*ka_m!PmfMx;Yj()l`E24Och6xGJ)8FyPslf%-5b-L*!>##)qm+eb3Ua%cV%DkczlQO+Afc!wAL16lO7vG?)Q)U#1 z^G1Vf4)VA~Q*pbX3>|HwjiECptDL{`KwZ}0eTeHF)>IOZ7m($jXBmh)@{e#)ld+e- zu)F55{Nh%N(h1d@zj2q5^E|f^bR%iA!d$SMQTR5!E#7BHn}BpQwWMxEzq!P@eO$YK zC0;2c(J7Y>q6k*`ym=B~Wbn$D?^?a_vg?K72n$ zIpN`sy<&gA{Dc-ZXwg=_Ww z@&{L>)hIkC2X(+@OVhUkYLJBthzAqu7MGsvO7gn{Dt9GlAq`19oW6ytmJIqSVY_@Z zl^C7dSQqiO_kjX=O)VKg0iM{umP5wGtO`&=knEI?caL=}Jh3~IR_)9uH4CEuu_<8JqKr9Pf#?jZsrJ)LfouglRi!1mg{B zR|${C+1=22^TUUXazw!#U-HmQ;%14ksap45sXrtw_^Yr%cHFPR$=pKM*7zjAV(ClmN?$r2%>%N&+vtg848u|Yz5uno@NE7&g+XCv7nd-e{2ZI!>B=k;i1g(tA&^|zFQiXA$^^BY}H`w)E z(oHFMQ4A*Kv=7nTZ8D)*_3@5`hlNJQktQjOG8`CPs@)j=+Nu3o`3csxp3kK}0e^o4 zmJ7_4Ymtvb&Iw*rVjB(Kh->~^F&G%Q?Nyw7&H)Isb!Ct3tZKf1S-FxArixlNMo}^f 
zjTGPE+^fn(44~X`TTHo~hPgNs*>@b%GZL_;EC3S6Hvm29-5F?bcsUAlw+Yi3mspqU z#RWoS875{=-IW2mbBDgvQmj<3q_su3nx7gI;^JsQd>W-J$5NV?42?@Y0mtWQmnv}W zDXWb4z}>V2$|qGldRAiSZgnwlzc3cZ4~H@EeeD>s^D8?(M23e(>9w+l2<-ySP>z27 zGxSr^ag|yXuh*cr2qZ-a0>_ui;w%SHMq4JFj$WVAU){koBERy*%#`AU+7D!uAYN!+ zyUTW;V5!rZ~`cZ54@@#FPx|p4i`e!(!TBU)Suy)bmI_Brh9J0%r!)V&uEE!}kN=4d#m@ znQUSl?WR4V#!%{}A_?3RlgO)J*k-O1lC-Qa6OR`wJlU!gs;Aq@!rse;~mW zS!pDCka(IV^jjQIv(`z;C+I7S#9FZS zd!(=BS&k)e>+`PAMp+b<$2TjI;+Hva9^|N~5Q%!cC0r2=ST{&A+v`>K5O#i{OoqH?-fr%%E6@-{1 zhru}0s0B6ykveppB;N;>wEk4iOV&UxUzR;S*z7zzwyFMzAhWC}t3 z*53u-5$G6#M+@+p0k5Iq*YCi-v^nn0^ow{jbvN`lAxl%R)^(J=m3sBV%d$x zvwLjI4?kwv6j%^b#Jj3%B5Dp2#igSh#ZBBx&3lF$VH`?XLBt{`tE!M_@TPhB6M)qF zHOZ%wUelCfzd&tO2xQpTm_p>a67V5`81+yg(&^%t^lt(PXT-~x2)FeXB6UEI8c+yM zdJa2m|9X_o*NROJky8Zok~=gh*JIljFgP{|=Dp}>2Ki>H9a7vbf=b}OKEEzdoO$#Q z{OwT^uZTn`#mRo=ojg_jengHlYRBtmE;q{y#)TC^+(sgcqey(0t-jMkH1JF1ak%(- zbD-)xl|9TulF^Md&SfP{IIud1>NwzuUZY(?^uDhHrhA|dkqh2K=_Tcq2L+Xt$vGeO(0iK2AeehVwOG+e5!RXb`h2| zcuonBp7@7EG4{kHo{8cPEtp@tj56Ok^j15XlMEW#V#?v_7g|yGmtC~bH$uBYy+~om zZcyD85rBcZmruU+BV-$L#CUeFvLgE@_#Rz)tM7K36d===j9-YV4r3#J;^-7-`t=2r zPSm&WowtpZ!4@(I{k0}=^uHA+brDJ`TxnVCg%tdb7MWF3gl$Wr`QaKZn8}xHGg4Ig z5Rvlt&ln|?jyNdT^RhY<40kgXONB_N!jHmc5UD*?ahLl8^5{SR{reSgjfQc`3(d#+ zuf1RJjYnI6lQPZT4h{p8`vM(7(i6Ys{6b&RytkQ38-^^y_xwlkd%$B)&Riz5&;MTt z(?N^qrj1d6@{-if|DX$i9x96G%*+UP<}D~~3+AX%N}MuEsVXY?*!*kXg)I-vx*jGN zWn8^ioiZ;?ydH)`Pwm6r<0&`za~CzzC7Z1{5wb8nh3xVpn>4KvhnsLBkY!Qe=*$eB zXfBJV3|Kd#k&GQXe-)fFF`G$DE%K9tTvV_`7});J*AxyRN<*e+j=0G>U@HiwrJ$P_kV0*cG9m z6bkLB_VhKgvwGs_9_}}j$l6-?B;l^WU*36jY~DPu92|xEA7S&%nn+x+YJ;5c8#jZJ z7~Wt64;sVJ2MckWp2j5q`?B>u=EqHdiEbtG8(+!oFgPVYHNNF|?;M_{9wh?|inHI#W$q z7xb~(NcsC%5lT6`ELbNuauWOzgjmUxuj8bET+>H+DR7!UP#c|C#MR9XX1OYq{4MF9s1T$B= z^cUwh3VQIcX$C~igWVMnRhCi%0%r`vfa3`E3-*RuD1Dg>r?|G~x0E_{VLA;tNEpG* zj0|p;(AcK%y&8!-gz`V4!e;IG!y7DiX>l!%*N;cDZ%3)e>uybG=*fH7ayhtn-#3P)Jg#x~`s538Uv_m=|o2;Z$;s^ks_{ukf%kiUxI|>;67X!uX920&D zrNGYCP}j0;-^a7W5KtwLGFAM_ZupT>kjK++LuMxpL-|3?2ez?zp1(EW3Rk1YUoIvj 
zR_njTFd7G`58$>rjd|CA9@Gc_%EH&7cMWI4@zqZ zwZmf@)-_-c_s_u1jX1Cy=4Pw7Qr(@OBW}l?&|66s53!9T`)M=&qZ>fbae`@nakw?j zbfi|6b%zZF47LBX(gZt1RKM%CT9LHs${ocUY&WFWi$%s>mJkHRjTts6Z!u+F^0@rJ z*(MSF_jWoW-aND9N#!kbyGAo!Vy#c6A@wOQ;a)HDkH}bkpQx@w>W@4O?7O|@hQg_` z82Mq0NweV}W2DUig7l3cC>6e=|5M)=o<=hIgN}vu#fQg$$6&lHufaR?+NhV2hB9@z zG6*ieQk!?J-P0kx^Y)Xb+bAtI6KF0VvXUk%n+1D6@jGOvOB-i3n``RZw5vk71m?ZA zyc`MYV&8cizZ?+i&|63DF$rE$G2-?0pVWl+j1tM0IM-hLJz0lhu*hU(fHX_vVS7OP zv+&?P-X+{%ZJxLQ_dPlgP)X#fPnvx(_eagY*4)@2Ah|YPzFf55JG}PUP#-VPmy#e` z7cl%+=>qMU!QAS4_Ptrcjpe{aDX!j}D@h++YMD=&Z5zW@XttPhhKlOU#%kxgp`HHN-9-z&hf-Iv zl8G1!hrX%$T>ZF~_5G%0=5gI<8kkbQKHVANR@pE$pH-5qpaQZlY+~IZz~vnbpY@)&{JuBxsCwPlVec@tU9AfhZ1G zygy|$d#cUcD~tK;779kzt);ZGB%o4By2>X#3}?k7{^~ueNxtogaqVYAUUPJ>nl<25 z_D@nhPnvT+h-Z*#7ElTFRH>B7gK~l8)e^$T<5po_C4YB|2&2Ey?g&lQsUPD7%-mCKYqY$_txMz zB&^VODR*D)bDw<+C=W%c;o;}L^bH4vKNhH(n49}A34I6>+=I}05r}lzvFwinQD8$MURqZPs%Jtd=s+A46SZA ze3LE7nSHqLxWP|kp4K{|Yr0%(8oQKJ2_Zu!jIG8I$O(c}iBS~yr_J%&z9_%(GDGUP z(p$JqF@g-+yrJ!iEIm`LE7;ecV{kH|_gMTSyqRz#&RD!?8AXvU79=!Z5&#DSFm#pn z;+5Dd0^IcusslAiS?4UtPvOHnZ-cw8WsF?r~Dde&2(?<%rx^TZ7vHWkB^=589PqxVUQI@y3}C z(j~{&F$&DxUGZt67#+c05qdz_PAvUz1w^4GEvYXgMc4F~j7i7vwz1`g!I)_a{0kc5 z2NXV|V+b@sznB#nc?t)j!^g$5;w6f{B=S14C;X+&{7p0Mx)FRF?KI9XAnRW6(S_J_ zM3+7mxI2TX=26_-wZd(6jYeNXGyB6*78e^vjS36p7b9jFxzqhb-O4Yx&$jy(PK^*% z;XWiDEyOE(G!DBn7hbVUf;+Cn7I|8+k*PIaUh1!Kkb-qwbe4V z+ZNPRUe{J@U8ZEs&B`7CLL(4=iNw4H?6kRGYwY!ek}WcdlRQlSKZNl+tk z&*w62IKG6Eb_FJeh_ zqCUhoC}g=&TcID*THjkVb&y@_{(g+Gp_}-kK^N$@L!y}exoJiVRGyG&I-c)Z*W}zp4gB*m%fR<@N!Uz559dJbZt9amd*D6{E`&S4>V}_))W31_ z6;;McFXMHpxd{r77}kGO-|==eqp;s*vBkouKL4QqflZZeHI=N;r_5@XT>rt^0W_D1 zuZ4ysh4VIoi^`spXn6%(iLI8)?#x@Oe}6eqZZs3S8kN^bn(hn@_(}O!Xh1Q7+Us0o z*|2N;%C-}F12to-0`!AQnr3IHAjbhVsfA}X#{}y49|4=^q_S8s9j3&_#jVvJ=i8^~ zV+Od1Zyy6klfn4Bn+f!Zf_$Q8NzPxI&$C?o_fFAM5&N&Ldy|mXHGSp~bvL(d1-4oy z=VXjSzX!+&iSVev(LAn7s==_8o#l!>TH&)|dV!kTqT4+wR@Jhd4;$>=O#tG%Xj*j{ z_5<_`B%NO*&P^8{#GGpx{oLXDSNvbLF{fOeA~GVFzF7RrcN(DYi>ztr`hqG<{Sa4hO45Ngx 
z5Qq9oNkSa!3-^$>PQIiS8ngC5_f`@dQ-vk-q`lM(UryPi3KfTfO4Qug7cwW{E{=XN zOUUj*od$3I1+U-JwmrYbkQHkDjDJjddtdM#Kj}4yJg)@W**Rs|>jFMt9Ww!;8^mN&SFrKxV>q(le_Yy#xB4uu5Nq{m2SB?{H zwpQnkKt@#}#F2;8pS);ol}FvijMA70K;RHl-50)ZvJJThMZHVp<_U`1T9H4nxDREw z6QWbPQd4625)^DT==ZFt; zmu+=rOEd;o>gxZ@b`3D+P!NzDaF%?R{U`WExF&gr9F&fc&q4`82C!^Oq)PX&mz$72 zJ&F3os(c0B*+hBM!jF8z2Z_eHr+wXRkYE|L+^p)h_WvcnTws5yf~`;@5!W|3cB$dD zRE3a(nwzB~3Hfc5&;*n*Cdr749i}>@Q6;d|cB}1Y&0;Sn?jtiNWG{NTLgk;aKywL{ zjB6?Tgys+oqN1bs6|8Rsv%D)ZRKwZ93WvCvQR(97Rc% z?m?>+ywb9#cY%{)B<-{i3az)sdb_dxt_CRVh>wNVUqyjy51pYYEJ9>{$QSPt&n-7q z$At7HR5fq!eOry^sCFPpHR_3|@KN)hywjx$BZI(^qn+sCFWe7HSfH^1NbI*pZTc*OJQaskRe$&-I`^DE zP*UXAs5&r2u+4EFuOSAY`)rD(U|9pjlleFnRr&c_O&?t zL_?aCMQ^p2Dr5->ya&qzW_u8R3?!M#PPSI(XkY1PO;bnoWG<~15V8k&Fg1ro|3eAM zX@yXVGTxBNcIe;4eMfQzBT!YAdUk?yH-|5Fy$k}3bO8&@oQC&%S!J^>*u!1Xe$PqO ze=g3bG8OyY73VTIGOfbQy^pgyC{XXMXqQIo%oR8p3!;F2Ms!-jZsBeP;3e_rmjlM* z$CB$ZC~20gFvlS~bxN{Y9l=I}M2Z>WNg~02@w%-s<)l#s{YBSbHLn<1 z3T?vi3lU;iIuL3rIeIn)iZkOVFv!1+(^Wdl>PNz100ogUlX)RC<))U13I%ej_+6rKfxdJ<7Qg`9 zhab}Vm8WLpxVl`!Oj2u7T*xx>>e{>777{-%x_BD67^8X4dEZLboDuZKAznwznAMnD zRtCXy+Th9A(r#-&Z|Xu1wxL1MOOnDeLTc99J+VI#fNL;ObjiR@neyRZqI8lZ`p5)> z63W2iv3scXIuR+6qwVccv2ULaV&B7)Z)0}FjA#)>PF^rIYx{)a7q(u=?=LgTlwVDx z@3Aydc!8U$=q~?a_bbqv-nsLhTh({LKOW&zbD|kK+y0Um-1Xi4*T7C^<5Lts|B9zAU!L{}qzSM?OuaP-~4~wQy6xUjv_NcMD#hZ7w zsoo{N60pTGdM7Cl8Yedu*l)Kx#?93~8DM?2Am~`P2vN<~_+H{+@A2|YV+Z2KX4(lF z15q+>9@OBl)wLqw%!^$wmIh;pp)F?H>0wVhS5`8=RFkWxF`{!q=(uufK zrr4*r3Y4`8&_8@allC@D%GnwJ;Uu z!-W$G8P1EChhDe*-5%KNoT9%a_r58M_cOL@(b3@O3{%;jF&J6*o@d77&MzF~QO`vV zlla^T{gY^pAv7y=K2J1$Pc>NEqzo8B8?_7IiK*=n&C@`-EsHeAI0l{2fTnjsQbq_y5m~*va~rf55U6!J%auz*;GR9Y`M# zDNkg1O*n>p!?W<9Nm`hwFJW^l60#Ec9@?|qkKjn&mw?OW_zUtkIZO32xhX34N+D5i zUHGsh^BB<15l&(5LKdI@M_Le2$%RO|<7?ia;#fW^xI83z3P+cK7ul)$_~qX&sFjI% z>o=@+3f!xW)knwCXR!y``5|>ZA#Ybny{LQsTVYb<)})~_t8Kvs*hOJLi^AVq_4}3d zm8NuXZJfy1M@i#z%bi-IE>cI6)7RQIl%iXuLHo!ze-a**wD`jv*6)GlXLa#VmQ=?4 zG}Lr(X5>!{Pt0ZV5z2kf-E%`UAvpQRPjCPI7{&ASi(x!UJ`N9>%1 
z;M*@sPD7%8-}7@Ak9A)Q$YQAu{;$o1^*tf3lZ`yid&2v$6^0jf>0Sr^gEF`5xIzz` z-z{s*WJ}92Off-hMjiT++E04sCv1sR7qfhQ+kz7Mcv9V<#*DrngFki?T7SAROcnBQ z3fX%SEN-MU#9Cbf{D_Y38!dY2{&uxPoEsny{P>8}Vh^f)7~@~hRC8_&2(l;r_x=9> D0VyJX literal 0 HcmV?d00001 diff --git a/tests/data/humanart/3D_virtual_human/garage_kits/000000005603.jpg b/tests/data/humanart/3D_virtual_human/garage_kits/000000005603.jpg new file mode 100644 index 0000000000000000000000000000000000000000..21f551c3248b316f8b7a9fbed70b7ba1d6bdc651 GIT binary patch literal 225722 zcmbrlc~DbX^e!43TbyZIQBfeRpr|Owpp40J=mt?CPB;LPM$o8CMnDKdjx&Nl1QcXS zv=JgAL}eBv$`p_xN`S~1AwUcHhj;wlSNB!Dzh1qIB~=uMowN2{>s#OY*4p#m z=D#EUIPBu)f>^Ksfmi_lL(C5%4#GeEpMCvWRjc-?@A5e*A-k#I*Fs zPckx}W@Z1E|DvFf`m*TNyR!0%%Bt#`_YI9r%`J@9w$I(Hp58w8xBh|AG4A-pB<}}* zN<1r(%I4$>rRtYm3lKX0d&~ZpVgE0?R>5}tws4`&Lfv0>E%+@G{;#uY;qRNQ7X7*Z zxUL_5^=9i^i`N`Teg5{-5)+#fGx`^_?Kz_W7+>V!*2iovh05w_W#&5 zfLN}x06sjORR{n99UE814(;tXVay}2Caj@(gzSDSr`asS#89e5%_EoHiTNq6MJK6Dsq<{{fp{%Vg@d9DG=NQZ&GN;C zcJl}m8r2M73Owr=60i@6wb9HYcCz|hhLOlx$$OB}NNG&mE>jYpx3?FR^!(VAV&@Be zdxWEnyVx`6_Iq3=zZ%parACzOT}k3E>v|8^Tim#|gy2ntkz^Y}FPm^rGAdn?eqirGDp z-n3W&`K+hv?nYjG-cjz$J5YM3TpoM++6S1>GLg?(&MQvu7-XoqA1yKRa z;Xj*P6yONHBNrM^BGP+$$Fnw$N5U^gj;~@T9ph|MqC=ULQ3A#u>nC-CTaVMz|88Hp zLqq;|c2ojJq(Jo_T6U~Gb52i$0x_6IWXYbb8|yKa)^caHOsEKe%HO$f?HQq1#zII^ zsT1*W7=t;zt@hH5D$!zzi6J3w9$`T_%B>G;4#rPZGIQGJ5gE7~kkJFGYR4t`4*E7o z&*z3_YCLi!eKSC$Ky-=XxB<59zl$4{wyt<%o;nSunft`7jEnon)o?p0?I;5x=BbEJ zuJS3Dr1OX~meXMzDvwne=Ok^6iEqWouE#~qBLuJ~P9<7kf^xXucv?&z#|T(SDLL>~ zcyTd&)XE#8!RqPWoUpCL0J)qi6R`_pdLC{LnJ9sYwzii9MQa_x)kOyZg(dMXN4@$2JnUTr}l z&4?OR^gO~+Of7k3*Of>2?H7L10FI}E6Y^4{L#2YjT)l}~iq;U55WVSfMo8W~0zR4h zx`z>I8_$>AHB(V&p?pAp_0IeJ=8;6Wg za4_yYM4GLADhHYNVh9sEXB4*omn&Fzto*44detOJPlPt+OC_!Gr-+A}(PGvMW%}1q z?d(}((RZmRX-*@uXgQzfT*vL;56(bTlRNtkpOyp_8=9@6@=~mNh|b0i3yJIPGMYEG zb11%!k&KY&u#^~l^FjKg`crI&gkjIEniwe8A3@VqX4=ihN*Ulmd9u`}grFU+Fv*+4 zl9Z|1q?SYo$Ab6SkTh}X(mnRLwm-!RDx;+fgbHM9Z+s1;NQVd@S{LvMb5E+ZGuFP3 
zv!6#814|cgQ-(0*$c{(hDaKrW`X`4u;NC{0@da19(s*R7qVDnFCN&j`j@$1bWmJ7h zq?xr%f=sN;`38Czc+R!e-;QlFLA8v4Jj#F`A3_hep|&TR#yk#yF;9Tx2@>f$YRw=2 z3MT=K8Z9rG8z)^4PVnT6q@m?{zC$rce9EmC{-Jvrgtl&A1gXN^!ONfu!^EW8RnA55 z8-4^OZ9?URVqrw^bO4v`dk03i_>-sWs{Y}~_mgOs391xLG?$9s=W=HDa(jmD@I2zY z=`UQ7kRKC#u};qQgJ?>ogo{FjDno0CN3ZD_?T$%hpySy!oz{o0)krmX>37h>X8+&U z-i2@}mShT4KMZDG{c`!yy>w%&1j@a|S+6?nvjYkNgjJ&gjVk;j(`QBeJmR>SavqWS zba*(fy!wcpS0JY*wZE}BI^*q7=g#w=c1b3v2 z46B{<#88xc9$}X2Z%!yQtb6Y@?7lVWFmXJ{M8+%yv{*{PnLt#BDP|rK2NCHB%~wr< z2qQR}SV*22&|dv|-@fbTi{Ob_-FbvJ@aWTzCCyj1abRFDCsh8sbK5Jl2hb{DDWN^4 z)|=bP!YQ)P9vm!DTN_?_Th(joX0#g8&N25zM@sEA=16H;ULr~1xJ0%cdTaSXhEj38GG-KDyF)19zn%SXz?f4bJ{j}7;+-A z10IG5B+snU?A+Hg`&A&NE6-*tM|EW!LZRz9?2&SlWc{qK9a-}T6lNAli&ej@vRFN> z1Ph%&**t==aVZ6xIpycOm+dp#4Vu8O567XjNa#7&Rt7C<^c zp>aO_rA`%38}WMXx1u^i*7q=uK#MV}H_!c&^*q_ze;(295XDpCl5WiL1}b;O=I(I5 zZkY+uKL7Lp8#VTm?@aSnP7{>^gJ~3jiKY3RV3_|DRvBu6r$*6IP2jUEjpc+gn|=I0 zi$OypEwgG4%x^=MlTC1Q3cevH$B@E*s05Je~%AM@pEtlw^=e zLtVz&L{6)eST-fPP;sdXn5fEwq9q&}-z$SZNeHY5Y;?6)HafnBH>8p=2M9!92GO3= zak{O6mMLO$j8wz`Zp9x%VG`*q%z5-ag1P>Gar-I@Dn@F*`lokW5(&H+G~?THZn@?9 zfc^6b_xIcs9-#GCWp^xU+icg0q$@y=iX{Z+X6#OvVK9=y`4>X>h%E8ppRhyf7)nPZ zbbiOhs_PWz7|qLh1ct>%)kvvk9dq4%w$!*7Vm+1}l7EJ%-zxE+b)9BvVzkk7Od6{@ z!GL>t{NSVuU`z#g-+CTAKT5{I@Jgoar!X{ejy?DFPNj1?G$<$!Tq)HuMZ^Wai0ZZs z>+Rvcp3?e34r}`2)pVX9T}2`~dXmaC!_L)Q9W7Q917-1)NjE+g-MwyI)h5C3=KlaJ z1xbHeC1zS&($r56=)+i+Z+Bxz2uKHuxO5~`N4#HNGCm|$CaCNCRmNy)f#>Cb9g&5m z0Jd?4&oWSrd$w|swcoNxP_!!p;Ayg#?jl^-5jXn|WCE&CCXA92&FI==H?1fV81r7$ z+R$7DNp8wG6beo5e%*-N&C{s#W>m+H7PJRjhV7UN#YhGF* zu6cy{*~N9Oy>;b0C_9NuN1;?iW7%2r*1;#^JXxzV%yPNXl0(7AZ+(urgb~2J%z3Qh zd4)SYEDS7y{7JA4_Q&GwI031@D9di)sOwpIp>qe`tVlE{Zj%{Rmw(Z6cA=~&V#p_k z+)%lJjax-UW7A>WCp(=o-#C4q3weRB=MjJTr##kDD5LX;h50Lj8m}uQ&P>P4brjj~ zN&qS8be{{gq_$gPp(=EqjX>z_>)WfQ*!A#cAZ*D$TdE^0;Sd7Ygr~Zws3{_q1XZ}e z*qe2DBskU8M2YvOY9ufWf*^~+K&|ks54iY^B5)++RnKX_qu~PEgb8Y!T%jGO@R58Z z)#C$PA7yu{JLTiZ1gUJCLezU+fXbx(_-t~~uZlO+I24w`ok!3_idOY35mfoJX21Wt 
z^7kOG0h1!0ocurWj5L0#L1)cPmhMUWqlsdV83dT_IeBPZ!Je_I~5ZLbh+s-jV+;vYQvdm4?Ogc)IL+p5v=RL47VXQ z5)HZzPr_ony3xWCR`n|F#O#oUqa^}lM}NUt`Ks29@mxv=OoHi*>~B%+zbk=Aib~Oi zdSkCa6}>w*J;A!APW=LsCQ(0hDCg>f6U?%bkUPP2ern4~&?&JCcH3RxN)S$mw5xCW z+*XYw)I!;K2u#&p_6|cDfJ}|ORD-T?PL-%X>YsOAgt1I*0aT1Z4y$|os+we+4~^^w zx27E>RbYaVmz$o zrg&UEo9Tqbg^su?K-CA-D;MT8OX0duEd7p$klnA-f*TeM=7MC{EbYHK`#txHoOi}5 z@h~0WPz#+Bw`{M{Vy)e}oWeM4#l4oRi_V^L#K~L3Ymwh4UkR zdEXpGyt`UMWK4srZw(|Iw#tMvcM@)!47*45r1yZ5!Yc){c;Z)T%C2{A3wZJ@|09%*;)U}*kAJ8QZ0o3RAsbkj(vW^=TB zI?e^%;1nGu6#O{;Iqc})gqeQdE50P{fr+TsXP~L4?M8Q~mYfT3%7N+26Mb$@)Qo5~ z%xb9xZl*c*u28HL$}*4%&hH#C5U)rA`Q&xti|?D^o{iv;J|CP^Y~vsh`QBs&FRakHY5bW4mR$ua7rDT&RKG56ZT6=Ut7+S5x4F`evy< zPJXQ>wW;Z>DmtqN&oOg6)LQSCKFzqJu8W2!a2jH-2s|~L^;I)wWqZM){;R6jz;ssb zIw4+fjPbS6I;!Tz%905A%oQ=xRE3j4-&Mz;1?mxHxO!8r;@^^q_xY#%%+0+C8%GFi6KMJRa+)8=uCX0eRtjH{9~ z|JG;86b+SjZ&!2b(3II3S3Wbe>E!8vCGi-W_8f=+JB2!o|LxQ+!<*v-N?nWN9Z_yL zlPaBjetxRy@#lHOb*$2#g!N0Q&wIJ&ap?Hh(CFY19;gkMF|ikb0`v1*Co}*<`3Esx z{7T^}*}F-y2%M<;lLR)(zDE?c>2;PvB09XK8c9psfrR|5MQF^hAtbqj7ybaSP7{ zw3+0v*0Lr6I7yNSm1OTNxt9OKR;C&fD>z?r6s?u z4sH8|B^3T3>X#?S`C*) z0&oa27`>h@mD^W=(P}R|N_ZNpae(nr1h09ji`9A^~RhX{a zGT`{El3W)P#-{|yrO$Zdq|oM^_RSbMgdGwyX)t!qBYH;)njZXjfP4PM>BGa5GJ`h`C);~E5CpBjf+feqa*x2ae zGx9Raa~=`JLt`<=rE~Uwsibqw_o~gIw`dBL%01Tb!CNCoa#g32jH)q~P))j023Hxz zf$A!Hs>@@$gEx*w&1q^FTn!`4%lGgRiOCosa}o#29C$_YE|h!Nj{0-w6@#n}kyK}p zpOmuWkm$up!^^WjRUn$vE?!?^Vy?7Z^rSZ%=t_`)OmsbYxW10)nZ8*W`~dQSs|M>u zm{A6e%NVT*waaU#>Z&6J?ue-NyTUN&-$_P#@)dHD^x4>$ayF-|pS#WPs zno(I9oZ@^iN)b$;#nVa^oJL@g3nJ(W$X4g-TsUSbx_vbK#PYbfz~g2vXQ8A1Zy1$M z#60!P)x^Ee<0sG;Kn|1k5;%J4#i|2zl{u6LF&Pfb(V=M72CE_TPim(y2_%gw!@QOn zpRZjFd4t6;)NtukqZ->7dT6MCCpVle&l`~L^nSGp=4$f@*LV*98(veBeF(xRFFV6U zTOY_^gwVMnhW)GEWyP)(S(LS!;xP;WG-rX!m-3hqtIcg32qm0^SB7-N`NI=Xi9h;W zm_#N7`xpS;LKhW7AZj92IQ`7wWC^Xc+p&GdO7f5c(S1%$_)-^5{dss)f2hG z(AS^KHpZ&O53yf?LmSB()2un%QYS*(o>mxsEx*O z9PBo-E#0C72sf=1DS5z3bp4YH4n%C;h#P*8U8Q7n7dM8?qU8{U0xRM);Bah)0tOZy 
zC7oGFtbeM~6C@O%>6SeSr5ht#HF2XXu`2RM9Og|=g_3;%*A8H?g|75@#M#q325PNE z+DQjA?wJ;)o>}a`_g*`XsJT2#soR~-G*al!aA%%u?jA%7RQ!*s&n9U zUbqjJq>h$N;idXl0^Dno0m4KWO9OMFv<02-v$(Tg2_%Y)m`6tsOZYuE9se142Ow*a zEDG3@9sdLXrEL%n$)0(c!Iy9ApCn|dknI|oaZg*}LCv3*BD&yPq@lk|IB?5h+hB}x zwe6Sp8VE~ON;T2ZlH;P29Vh{$%uZ2NQDA9j@QPVx_*FS*M?N(N>S0E#noIJx3WxSUAmx$(gZoE#&YGDuiXAg zqS_S7QbihR14D24-cWa4PrYX%G3{?b*AWdVq1dL23!5IupA8COORZRg`qx~#E!8G8 zwiyk>(@YgNsBcFwlp>iRH^O=D*)CT*tva1U$If0~FjvxQVbX+wO6U_=zgRkRmWv`8 z_N_VARvV30%m6oBRX^nnUgFn#M#LKu6F6&o!%;41i4tgjY&}txbE(WD?IJA8U>GDC zm!DGe(zE1FD4Y~vrp-1G31rg_uS!ch&c1lasnm^?s&IVe$!whRt8?qP#6i&dkw+uC zD*PWEi??6FB@DVIMsCb`soCC$>)ae>c<>ghsuClcy{K;5UEK*Ho9I1uX5@$jei-HHK#Mrg5s^fWesp|yd*8ieRF`D?h`n&i}c z06em^S9>}$L`dCLS{P|k0LNDvHGxcL$z;#IyfthlVupD}_S{m;OTJR;kS;tKAc`CD z0hE?xkU_ahHT2Hm%_HP!3PwW4taDC7)xm(lBLiO8vsfzds3*xR(#Zyjb2?@b8k4E2dC&n1IoJHP9(uG8fNJFU z$5r8ru2=mSNnc@Lz`&6gfvLQ3131I}-8R9+ovqPwjNS@ZfcN9cqQLA$h=OO*5|HB_rzI zg&doP$>(wydQwo){sPbbV|E&uR-X4jt1zZhxg+=c4a0nniU$Vz8|QM0+Y?HHg`-tv zT%ld=JmUGc9HjEWDz<%dG`&&pCRo0X5g0XHuR~-4#%RZZB^G}L&Li%^@#!2fHjh}* z83x^(Yo{v?@3O@x{tneW4xK>AR=Ow?ChVO6i9-;aP&3@OaJr@hM z#|7#qeHtbBgMdqLe8ZTPJ`izaH5U2=4txM4Ved6wdY<>`nVK)ZXM$Cgg?i3obxFEw zXX=Ab7b4Bad<{Cq;hs}|vvukn%@>~KOgtp7Tb9&(p#+Td2#$$$Jv#fdWsP#;R@2w~ zcal^qRY%1<`Ew*QRL<_9}9Ene*9$YxiPzyGr3p4HuXWhUBHhs zPqdyF=+lAvrR#n@JHd{%Fb})+6^)EyHSPVv`Np^6Z!j!FS~ii*-@|U-lR~Q_2cG((8y*d%Z8l-XB@^#u)b?*87#BaMp>mr<}HQfPPo%P`eQn=AFHot^F?4 zKBw}tj|bgeu&7+L#~thYKCtt^^w{i_dp`4h&HI|%!SAzPvXq@n-~CLKBkMxh(5rpP z*4NJdwpjK3dW_mXWw)-JiO??9*nPe-ig(6ni(2g4MTyN9-qn?W-z;E5&S*3B*~_vo zlpVPWqHxe&@eM~0gWw>`m0v3#F1y!w0e_>a8GioFx10-^+JzHOgWi6qaSujg7hd^& zd}bceNMAAV`7IOCyT^XI?qpHBp8cgd@t(<-yZ12%%RgAUG2Mso-QN_nX7&kJzyExO z2M>LvSTOcimc3023^d)jpsIUkkCx=I^v1|GsEg$~>{RcS!DyHnVx40%9o^oSMFmmbjpWUhUwlK~dQGi$Z86?0)|>mCnH)OM z(fq1CX3WUjW9Cw--wn>U`o-PSD?v6|ohR4W_nlWP?Xg~I_M1R z{JN868|57r;??H2o8sME>sSBo&C^(qb!%dcfps~!ulLOzJ0&|?@`#}?U$d;6FR_Y0 zkLb5Axz{xI_H2v`e$IU~WA!qRv#~DvV>7ZhFgfwXM1UpxCksDt9}dR63@J^ETjx<% 
z%dUtjuzZIH`Ykw(URxS-5}wf3HEp;>cM2<9WWD7gN4%F|pDwSqQ{6OAO_iJ(+zZScr3|9gm<%CtdBKw*11m+CrXUP<@|Y&fEiakGn}NqqULN4wiKyYJ_1~uk zz8E;CxS8jxh~tgNYiTc={CAV`Vy=OdAT%+?xnCo5P>4d+SYJ zYqphP-Zp+#@lOjo<4i*1F+AICCtCjuoT$E>wp=rCqrW)T|B6_*B_ntfOC+?-H{(Fe zstWIkMDh7S+45%ZS|5W_O9^G6N4};NJLQH1!682`i(#f z7hC>xQ=d_|^2FgVYs^DMa~Snm6}(T;YC=xRVw4@K5y-evsg=6f|L5}Hpn=h^VM66 z*?7CfTZ26w(#?s9_!;H7?V!$H-c40PUF3vu&}KL8;g?LW-eYlA}EK2L;@8Yw={?MC%KT1HG?|n9am?>kYU47swgRj!kI2}I%=L&Z_pmwPCg&@ZR-s@P`;ahF=I&ZH;mGz|O>hyHG1$F(o)tWIu zu13>+B-`v9_g|-uBAl<@*PfWa#ZH0`rR{5FlAaoxxa_B?31Z+MCtRF-d;nO!@Q-i zvEMKH^%AMyCOGKlb15fJoHQ8?W#MYvAjs6 z3(r<%gQKV8Ldl$0GxBb|#NAhpGp`w3iaFvv_J%%_nZ@t-%TH*x^18}u-z}o`oj@N= zL47)JY%_U104{O~B@mhC%zN4F>KsxGF`*-rmoi3>oc9*#Ic1DgoF6M1aK|=ueiwR- zFS#VvZDdz{xm$(!Bf=fmkhuf;qtCKxL99nsK@`^Xc7&%v|HQ~vg(Y{z*8U1hJ)d(L zw=d5g3=LtUyapy&&0gU;%Nx%q;v(KFE2n*?j5naSbu$*!IMBwJtI$dHI~8F|ou4Dzg2Ea0z()EOyR^wG3*9dJ}(9i`N$GmO-*a06$7uTWy|@=j?0){H$xK zR9=b;vg^AskGRncwwC=_ab$}5=i6OB-pId*=SY!lW$j(4C<6$`7QY;?8k}&dpFnBo z{(}}>wbp<03s|u^v0mPO)G2EWs1e4IrZV#}RTh#Ums?ey2 zITnFxHa#mXuO6cc7HQ%`hP3Wyb-@|E7f(O7pd5JyP^|cLJ+r1NNaO!dkWvkhWJR0H zhQ7iuP6q`{u12AGX;>?s1n{ZVoF9o9RLhty{bmWwhiS6K3O`NW85l(RB z56uPuAqllITCJAJNiY_e>kV_2Di!Qv4i0Pd+blSVJK83#tCY@g1MOcfU$LRBy7I>9 z2Cx`OWfy4}1;IA{(|W%2*+Gq%O%p7L0*dZPs!?v51%i^jvh4tXD~U48-R;|S69^}G~RCvNx(ZL=m-`H-0dtJQHc zaCc~GSxlQrI?p}ty-a2Z>3?YqPXqtZ@)58=Hf2a;-=oKk`(<7v}TTx9&jm+Xnx zV^M=@`Z&={mHvf&h!y*8vwiU0PYgV{K39bVn@Ar5Bc^dDb8?22^7@?hRi{h4ZNb@& zm>JM`C?)KA*+$i! 
zPVMMR@}sS$42{Mog6W$JAEI%&X#04`A4;r*RYkOpi8-aQ5`{`^H@r%tWe959pAscv zK1(2LL%F-n>DkFs=Mf1EwlRHQn7e0G4lu>mfWG;FPh8;sfY>#EGpCu;Tn#KJNScqm zP@WzbX?EDy=3DE^(nc1r;ld7Gfy|x0VfSF^{OC zvjNi8zS;9htkHQ|r^vMRMigS_TXG#z9&Jp3tL+`7zUOy^JUe+_LHc)g zTrQ-5zBLf;*iWE@5P{i$u`1e@@=Zi~5s5 z49N2r^0n7vVt9L=Rj1}5Y3VG;cN1e&lLBl&^}Cjcw9&SH+j=JStk_jlD(3J|_OQVqM38B}8yKPBtT+40`W!V=&YhE>=~(Kh z@>sZQ=EQ^+uBbFjN(eI@xbMgnKwV#pdmQzn>z>qs=6f;mk3ysgPGMB*Tmkd(}= z<^Ls{oQ1-Wu%M(RaH&8ku?(K3c(aYUx-}nXIFI1uF1R%Ye`3Pz_4jOyUYK)8pLN@QBNEzOv8TeA@LOM~kB>K+54R zDu(joC|X~lX+6ye9XblKor%0cKpW>`KUsW~bQ{9Lh_S178OfbaK{B&@Nm1tjg4rMQ zh}WcgRJ0TYG$!s}_BM6zKNHQ6j?#aS=24#C-YFX4^i7Vi+QsvToBlR%#nX5=9ao3vQIKD*}DIDVSk-i(?p@g`{U)!!`VlAm~?^edusV>pm^=ZsT{V|S2Jqiat+xfJ6-Gf@+{S#d1ToO;xY8Z|0 ztGb)Q`KX5=+>EGZ`XU&HL|n<4W(zhv$4be}&QtM~o8FGEpv-oF?HI9vy_|u-H#@Zx zvQam~nU%VNL7>2WLrXpnW22+R8CA=Wn9!~lLliJH6V4~?NA;K#rjE`?t8$8z*`6d;cY$3Pc706G*H#4y)XYkGED6K-vkimF3I)1h|6{e6TcgOa#=t|h>9KIx; z3aw9tlNQ=14pn>1?=_afsbMlg5_Ri-&Ap)!nynXi5W~~8=RXNaV(o7CtLTQWNzkl) z+=no&Q{0ZEv|!~Neq3n+_x`CKiyZw^E|Y2l!>6(3Pek(wJvlrFXFb5t`aecr0t@Y# zg^tAViK+o&c#2IAQNj|-jX>=U=kZ3I)`tj+bf1ZS)oDEOUSF4BH_z^E6ldAnnWsva3>ocs%3Cfe)p=7tp5|jWijCSNaTGz zOc4Po&XjDVvA#&TbTstXKynxs1o)x+r_hCv3-J31RBBlIUHW_X4iQsp<*cQAL1M8~ z@Nls;U8@#w*eOn+s{Zkf+UY2!s;nD;=#}<{xXQ5oZ(p#?ZHdEx9xQ6_*bB#g>2+5H z=>(X0G-9rKkMEk`bU8s&24{V25kjQPC|B%9Q~ zFK0>f0C%_+AQwep_CWUVwyP{i+Sy;gyG$hhFEe<3Gu>RVu3;Od0X80Wp~R!8E3PBzq?i1PH8 zmd*V&kGOqSW2sm`z0COoyuw@>WzG#1ZZiFP_0A`Il|v)g5e0or^f1$N#`3-fmi}fEHFh(l1=^gVD2k_+`avA|Fe2x8AUH-}g9)$- z$vzNhcf;7IqCxl$`us(>GUplp^!_ty;OE*6m$C}+%srz-qg0c@rK>kER@ChtB**3u z=~|HI`xb|sX1BYK_*^n%AO~y|5k|!Lg>gWmkGhF$C&uk<#@iY26m`sviK?C}CouYV zKU*gmQ)V(BU0k+Kc9dq(IDls8-Sd%c&9Flzw@iL}?P0lETaHRqnzJ(Q>bpOgGtvI)H?&+j@&qs|DEd2WPejVd% zys~|QU%yBE!R7R=IdD-y`ot-)<;5ycr>D z=C#w0RT+P~j-h9Xd5+L|Oqj2+lR4FkwB_n)DLBJ;{WkoSSKi+*(%-vpH!sP1k(V7^ z_?p#g%Uqy%K12V`ZVoI53_u+HCb+&kZgbq;5pD}|sb+gY;K1K=k=P&;7cuUp$xP_O zpS0wA!tXEv3VGqNulLhb;25|87AbcD|I&=L^`VP>HDTJv5$XpWd)-Gx`opo|ij%mQ 
z77NOLMf{alQBZg58&hucKE2xBgLXg65=((H+y07kWQR>(r^|(>1vF9lJfh;<4;8~G z{#yh|sE3tWv6JwbyY>_ePb5ul35+Rz82SiLs#u+Lb3gBS>-X>DXNIrQy4(3qQex3# zW&_r!&f~kLOvh<7%70mtl5BPt?0;Q@_gRtc%)Em?JeQVYxvANEMXYVWn|HXbN%Wd1 z9HaadBca;Kqy@(PX9XF4^=0LmYG!YJSzNT2Hk381*Mgn-8%P=OB+bbi>B-o|^}7`N z??N6`1)p9ju1svlRjtcRgll^!CG6o6y$2C-yRR@Nm!G;Fdmp1PJJZ6vpINyfzRB{w zv5k0TN=VRj1h8JQ@#DyLMgI)r6WZA73M@Rf7;nBaYC1rF=o}e3TK?e?={{Xgnh@*y zaw@|sDf|7Ek=5N5D_k(c!^rc!8mCtl6{A}3<6rhXn3UXq`+V5Q0=j@P)usbMZ~yXF z1IIUzvp#mE;Te?*e{N!Pcv=+mfSY9jELb*vI_{+az3zg{k+M-gZO*9nf?CKC@3w&=#q zm-ekjolWx6a(dTn1oqM|%5K|nLe`>De2-hw_ zw!PA4#D{=ygdT>zbdMkeub^w|1BPJ++*E~#In&R9*$|G|A2at~MSK&m2KqDBLcxiA zVY^;IqBg(QpcLZKP3xX!Y>yO!qcG)+ueXcZ@nf6he$Lnm@1~Ir-`CH|=io*CaeIjh&toa6LJhn>dsMXFsw@tsjuVdnP_tJP@r`5xLp z`{5Z%JkdVN!1lv-ee5}LKnnIGFhu^H$Rg#tChoabgEQj3CFk6b zSjct(!!cEJR_qfKkTThYu|yL4`o>0raKHGKKW99L{-8U`n<0+fsHpi)djOVE8GF_L zbmh&2awn66N0URF=K3-t_u6IC8Z0-S<6K2AP%xh5x%3H+bnmf5C@3d;>Q2q!PPvnsz5R);qYSt_g13ek4$Sz}Xt5(z_5?S|Prl-6wJ3Z-xm7=^Q*n zOsVEk?Z-jY%msVQuBO9}c~3^;sw`HrNt_5sxeUNihyfQsNPjjSi z9^oN&DNL5E?LPa5Zhk2})NUlbozwvcfNtQclEF)gz8>Rw_x-&9CuGo9_+}U&C4#&k zJbkHhSIFkkwgUvcHFyuqPeSNDYWA4e0bBbHYHzOb!+V`~yYMD3Tdt__QVvDLS~v}y zY`U0rX_IH|krnr#j;2C+;OhIT@(59IaB(y;ld~H5o<&DGZ+UqH*~t5X-gTL|YvwJu zY-Aqsuj1uE|KP@UtJjJ%%|RtsVqTsrVQY@WqKc+!)T>y_LVL=YLhQ}DgQG?<4Fe^8 z31hpKdCttyF5Z}POirnyp66%Z#x5_a+#4DD@Qc&lk zjK1hw>j3x&CGh1gkJvcW4!w<44a}4Q%$!~`W%(7zwDO@4=$CK!!0wm1+s37@h{#uD zFrlrED6tx8l5DBfL-;$MC$=||ZSWr6C!QFaecoL&r-xCTYG2nIcHU_;KL~C+O@QxU z%&E8qpDFVfW^7N%(lG6X zJ>v^JssUgn^epdBAO8F+^ri~ zCH5^TIQaTX)z+S|709L=%mV&EZ_4x7P+Q@yJ-h4<^&(FqXP5Srr$vM*Ka632S5O)@ zlyoJGh3W-*Eh@t8X|lfJHIj*^4wp1ki5Cqgac^CC%6sI3fbO(L-MOb=En`0Me^XJ6 z)fSsKgYPaq#l3>Pw?UAo2IiKgW0oa04=J5b8`rPAnqs}qbGUZ&?Xuy}sp<@iA*0(!Qgw9B4RVebk8&|u*d-&q65@E0gm+u4mE2dk%aercidVBVe5`V692nJdn zzN%cbsHo7-JqmnSzdkCbaqf&`Zvtk0)QRpt+33w*e%|c0jlr{_KWR&O>Vt>gE=(S{ zpQ9~;vt5t(5gIO|%XOo{-f;>=$CTX3I2z8RuP?G4KIr*{CKSE4C=C|~2zLF$14F}u zZA(O7dmo2s9E7dwX*|yQ`UjAa;t57S%kK?+s!IP1oId9cK*h4xXJT2)3fI{R6IH!p 
ztYq1}PU|vh=@K!RSbgBs<*&_P@OaNo@;}*Q##?p*L1p@piS5QDEnBeul1TP?>lV}4 zJ4W4y6Mu-?mOoN{g{LU~se_~OsQ(O%s-Ee#aVO=JSw>ZVXN@k69viMH=F`?_9A@aG ztu?*UE9LYBQ3KO%6YrI1#*Cm@l{~p1*te_mB=ko~aLy$5kEm-aY}D>I`@)lLpaT_> zkdo(9^~hdY9cs*Ihs3q7Tp3%ewK+!mDc>zudy-)dHwE6OO)JcfvA_!34=0Z>=Uhs# z-4R&U{0kHSe^9Cu-=jW!Yd4O&>cQyKfz@?mLsnwr!BIpxbY1VfXg~r{L#QrRHnils*Y!sA)Dt^j=@AGJ=<9loY@%X0Nr0{TNW*9?|1P*-E_xm?+uwv+s? zmXizL-N5%OJrebm=`I+8QJDTr6BkF!LrlijeOO{@DgP>lE7ei_Pp1@4il4QU-(K)F zW!jZRSHO1y5BFGY<)opC*c=pw$EMOhc+SS=m9CBy5yOlzd8)`cZS* z6ml$#2p_SH0tGI6Nqkd1n3_WD7ScOl1%x-K;vF7w{pS&oWww8C5FT_yy}xt9dhozE zx4KqMBfNB=W_e3Gtn2ma?``3kDk&r`Lq}xC2#~`@ba+SvS#&C z;_OpL#+sZ+(*9iIIz2Yk0eZjoor(EYrcH;Dy%hNpzCe~3bfd^F<2VWr-wYusVxrYO zaEmK{mOsH@cgO_LL!pd`(z9LqqUOJDx;LzRB?LUk*c%Bemhc!O19}uEfrm``{!Ty8 zF&j*Z)JU0Z&Y(_>^Nmj2kguCb#3ReqE742_n+$5t^Mk*WZzzE%M*?*r=_An)dOCiG zdYRuns2pMsWTjxX+K(ga;VOO@g<{gV?*ArF1}VWf1((m!hy}pjjqjjbQ9jcnE0 z^PA76{8`dKz!^tk%3yD^_%#y&9B!}A>S>d-K15E(SvZP?uvA>W5GT6AI;CB5DRu8O zXH6o=Pf$DXNl+EY_uX#Iadb7MNSG;6s^}EmNypQc+2>v;A>V2lBg8RG)`_hO{^Xiv z++eZw&u9un$AE`htmtVjsZZ)yc6HR$DB}xC&Xg9S@FYbg!Xcu>OpDE%SU_9BD;bc6uLt6V{ zZuU@iU~QE$S?q37SM>IgQ|z?B^d-^{K861dXhWSm(JrDl>C8wYBM zd9U_gO{_(>uVCWY6op0=v@bv+}K69Zyre*5P} z9GvXGfE=!pH-)M;y3)m*oky&Uawg`=AneiX^zf`xE?Q2eHV$lq62DIP{0Fw9VVuY( z12pBwJ^OHL2flrCB*1soY5vV#r(1}iHWLmF_uY=LD2By^!goE-E~VV~n}qu{#FfF} znGIdIT>uX?+J#w{#Uq~L)f_&BdiDO_>e6_zgue2Q1PPUSng14|TqnOy`ec4Y#FqK^*jnsAVAHmzhr1H7S?D9K34!D~OM zq+yAgUc`i(Sga~98a{Ay<&mfAF*Ju)smnd_&&K9QBTt1>K%-Rtl4Q3xV^$lgjb3fA z4dz;f%|(Wl9I~b5JR%ro#U!fnSlReUA&QK}vgtf18gCmP+NQ3s`sLuv39X~cOA+b2 zmN`QN-kEb9x`YCGRSH9Y!QtSywSJkA zz|jKJ5Z(_eT-G?WY_~&B zlhi7(q4eqg{#mz8<3!OAIf`clW<(X~o0{Au(O9f+JxQZ}NNWw8J{^qwGU%3>Um0;Cj;mMZiy0D4<)+Y`8YO(x=;5f3a($|uElZ|Jxm@a5Usdz+ zm1kJqx9Hg4PM~k}=|Edb9Sr$YWSy{8 z&;nAht!RUTVLzimmD(_X;+v(v?{b;`|3%W32SUAmZ}-;CEyXQYD$CrWR7A3GGq+U| zb4ytwb5RK~Az8=gmJnf*kaa3s)=BntvJP3Mj3rBk8BC02hM6&Yzqjx2FaDS@3_otO0%W0 ziZtppI7~Q<)#L4|miORFxOF`?Vzll=-Z~S6RVh|-;})dNUs;tN{SVhJL1nm4f1$ay 
zdP$FsvBmzJcb>$NTeQtvEV2L`5iaN68ET{plfoy3Y;)KpQs#`utynSYF2Cn{80fvn zUcc_vUBKGBkhD@0+iMpMBR3=s;&j}{pvCV<@50Kt2(cU(PZj@=8Z;Jbp3YBfx?ovv zF1~;$-*YkR;XQsm5e5dLcnSab9&;Y3GI(|T<+;g&BmkAm4P_3DtEhv~8MY)l@@9#Z zJoGbmD?Enj_jJSx9h2kk(Zg z0vS-o+xR@Y&D?wOa%Hc7LF8x39~w<{j=vGOXl{4Z3?M8s=vON}pdV6u0NSvpvfJt- zj0jqmVYT32SgriUtP%7K$_#??^uQsH%UgGbks^vyM-E$%n1yl7%piWfS3M| zazicGqr~UC+*>!elO5@@pC|Z95xoZ)-H~L{JR_<3A^z47NAr4;#hQ@BK{RC;CJ~#D&bSTSO}YBh0?Y=aFW^U00ZS?PbFF zfM!`up^hFE%XNv3dS9On_N;e%U%YXi!t*)fGJD0#_L`cXSWbl)pn%Igb{k?V2+>a^ zN12Mh4yL*uqekF0@lV^q>wdnk>&`2@@V8Iv{e~KPjwnHV$n7VBvC;Q<1xa6^Kt74P z-{bBVu(KJ2U?YJgn1yzQWNXqRdvV#l%D1L$ggs_r1Zfu47eo;$L~$!xUd-Ma^VKGV z+^I>L{1~}~+5E>$@CQjd3pV)+sKQQ5hKa64ra6{IB86CPoiz>)lkfHNM8EQQOzeQR z2O(>nU-$=>dF86{0=m~XrJx?OYePNm7mT%`&pV(G&|>(FL=9(|*%NX?f*u26NXNB{ zeI#vQ{dL6WVWboRil@6h+#rsc5vmon17vxQeC})obSaFJV49j+UT|;Y&d38__x63F zDu24SV&?6Y9Nd@~zTS_UcirVxYFX~pXjq%LaHQ!i4|04#MWjvpWVIY0TxV+kP7*+q z`l8_Prw1Y}BP_t~Z4+UzPG6ku$%NZKq*TVoL!NcawSg@o9dQAOVr1GvgFGeaTuSWp zF_uQGq@yq!^6}mRwriYIHDLt$5M-lHZCRVqSeX>+qarkTP=3f7F%9uMhhgJP)lRAE zGL#%^PSevLI4LrU?(#ITcZf=i zC~xjF`t)I{Hg@d(t4m)K43D>H!PP+E?~*)k-Rlq6*d6_mb$(}U5WM93EH6u5E8d{e zkx;Hv3iJxqt>r)oh;lJhR9FE)dXgj$Hk_^_x+S545V$at0vi)q(Z-Bv`W;3Cgh}zO zoc}=bv6W-Qa}zQP<9w+(dpB)aa?ur6Z18c-CM903$C(9`Nof&AZui~IiPRXWripuq zn%_yNX1((jU?eu1lLEuKg#9b{7h3>jUVnf6WYFB8T%_(bqKmZS;g0pP}ZTF!SwDKrc;i=PMag;wI_@~_x$3g!mVVS z-MYtW7AF3cV#lUY(d~q2kdaysxI`S~k!N7Ph{AX()cUcjv27AfMff3RD&^~nO_4|) zETg$Y9@KSyCMOU{#%I~8YV-U{yi5zz47Lr$q!efd^2DqT$@V_h+6+4^_8H`X*b|b+ z&F5XopDZlJ2sK6jjA~UNeB@#qPrOFg%00sUS>e7HrrNAG+g2ykYumePXx2MNidVy5UhkGz zjZ0V6yw85t5;>iEmok``HZv!rVUc5#_-zC?g0CD;gWe8JBJ855)vCu^#My*<7O+b~ zxyJ+_)E4J9-=dH-rSp>oA!gi{%nP}8NM6(|w(@vLi0Cx=mg;e4Ybwz63mAL6r*|@Y zh71t&C}Jj|Yi*oF{YGCN7yirBvjLL%?uiNVRpxY)Hv_{Aq~T(8JR53byJ5SUue~Cd zYXu#=*BDlRZD`p*_Z`pK0o$J0jVtj|_#tJFJ3}e5{EuLhyF2Fg>}tS=T%%9S`rDn; z`#isvX&L9m@FU9y;u20yRqHL+xcl$vaNC$Ffn7s6cFa~fx|GVt2>^Be4M%ZP!#@IBO*Ahjbghbf3Fc$VjxMz(U^&aXXWoU@Pg`O3Gi%C&+3gx~u 
zb=QLz=k7seC%;X;3J+rNZ^@2)K^`jxe<{gyd<4c9V2*iGvjz;B8JhePch)0t$0_1N z(>{@)R3+=KFgONsiHKPj(JitqmZjJJa5GtbbY*Wtfw;(eba_Tvti3t}5Hw;Aw}4Ap zkp*a0`9I@}0xP=0jD4&6hEgP79Rd;qn^go!WF45mVL1CUD5pfZsC0>mBovN|4zlaR zBppiA+r=r__X)OIU#24jG{X<6Kt~i&goPJ+9e5e)$+^My`61P>tw|8ciE2PDvZE3b ze@Ioy#Yq%TYs9fjzlt1YtUPuZR{44b|J$nk@*7I`OHjb}Dk1dsj4LsuwSBr|ig%D>C$8sa{eEiG256~ayrQF-?MPYIM z%yoR~Pt1iyYM068&Mn0iO4(TtKf>5u789OC{M~u|!G~21iIK2EQJ)|wKiTA#Mp*%x zQ-I2}U>qdw%_Wj{L`#AFFsS_`YxfL>8o_*LN8MX5%@n}C5^XbN8!s3LUN_k00 zqzKT1-j{yw&QVLWHMPsTuqLy8@3oY^G!1M|14%hBLGB&ws@|2Z<8AAg2mq$X{QYTQ zyM{Suad_PdKEjD`Y7dp$cYB{G@!Jgd9<2v;K~*qhFw}NBS7n4Ah#j zi_S>cNn=rmR<~QA3tl1aeOF6O~S~njkryYW|7A)|WYm;x~}O zf(0D2nB!uP5x2}b#MiqTb$~@!AlQFoMN03GRK;{0=C}xDqQ``Gnq%S>9sU8g2Lils zTJg{KSb1@JS4&d@}2pcLyJY;uuce>stN9>TdacTk-w@ahIM-R)VuN&-WU-FQA-vqsL zB!x80B2zq+oI&N+gJZE$#&8{afS5Gcg36lfgv^vSqp%BknCFs*=1LN1r)HC zLc{?adt>6iqOo~FisN+cJaF$2Rf=cZ7Qb@72adfL8C9I<8V>(r$iEN38$SY$U1sKd zZr46GY%YGeeD)$O`BbfOLxs*3;1BK0KwVoH2!Zl8^*a9J# zx?XDY9@|hQh2v1jNn*h!*W$_NLjLZQfS(*7KUj?9_pZ6D=J~m8sG5g{0Qe9AdZ2Mt zw)}w?=?w^mfIN5Cx%Xb560lBNA3|OhLddgrnwmxwY_o;*IG;wDM7&z4Piq#_B7`0r zx?%)TO^{jn8$&FX+=|5tW+@uE1GAKqUv6f+PW%ksOgP&ikOXP zqHX39W+kCLJrhCI)&#L5f=w2yiG+fOuP6U!n9TLA z{2^7&ro|7VR?Ya&urt7Kh$qQ@P{Q+=rYr=KsE|%i_2~E3J8cW0p-({8B_c^<_V9v%Ay>p-HQN|#L0O&1zipWWhcJ^Oi^zLyQ-Ciez|Hl0utIbefYXl^scJ><&+vI#B85y)*| zhpCM|juMwZK@dX_|9P;ep)M8AlU%SE4mh-|Qw{R^Ws{!{5CVR0Kvx`E&jf%Ud=+V{ zpg%O9lKa3uK7`V&0M(vuimPD zAfSU8e1E~_%8gw+bdL3Iu^{pnc@l-|Q1DkROxOdl?i8D@a&+VAJv5>T5lc^?txHc= z=z?+X+xV*0NvzHW1Bb%X)+l z8u)hRmaq)?SCk9J=AW&Ve)hlT-a)L(ny9ha17}sL0B|wA(b^3BeRyJ@Ge?pPz7hKA zwING-D-^Y)mbfCn+I4|w5+c=Il4|f$j#ryTgevFoyCZX2Fr`;~X$Zz1gdU8uHi#e< z?~^VMB-{JU00VM0M{IHauBCU$yQob}Q_vzm!7g{&V2?2$ zn87q7u!a5u4qBltBm$Ztn98^W<{yYa6xI_o-Sl%Y9V8VxV8usOqzl|HS7bwSeDDZ^ z=dY3E^?Uf5ye{{;6i2!3s2RUxQ>dMPJ*cgea_XS1&YCZ_w zX#7J;-NSt3i35i|$XJx@V|*6tbzLlR?jP^;923#ul=SAWX9(s2R)5jJn$meYcKpxJ za1@c=?|*=!io-@mEshn>*hM^V!)^L^!@megf?)b2MU?Bk-u)txFQ%Y$a(VQJrW-_y 
zDtnuTJHlKS#8O0mGG7@Id@%m&AJzX?xc^fZE$W%G4Fe^W<$BywXq$-7Q65{yi}pYc zK0T6?B7{$U88FOvLTiKTJ$6D2{ZeK-NPj(6fYu+DZ-WYlOv z(tFUJBhnHOVxemN*&HKV)(Pq{Y1@v-P-=PoDwY5a!K?e=35aTWNi*3s8X3iH^W^3P*sn!%6Wvac=%l8z^YXQh0LzS}?>PnROkldzgV8(~8W^D5lKu zSy$Ol%m!J9XcR}LR#Sn4+H5B4ccBzAsf6iDFyg3?=?;ou$2`$2ZCZDrelxDh_OE_j z+g8sUFtJn z7QFEu^}Xe*PjjnSoJh+l_4@n5QqQWY#VMLFzyj4B7WyI_sid{nRUz@;daDy~72X}{ zD#a#u+N|_WiY>NV^{)fO#Gvr~FCS@eFTDG6@3z-jmaTzL%k#a|UG|OtkQzGVmpJ?f z=;8j;eH@CH)8?8anvNG-!hc<$Tz9nL=cT%-YRaTi{c5cBfxXxt7*BEf9%e(X{E5!> zvML!gS=Tr-6fyE4?o)P}^O{Fw)zSKqfhDrFe%{ljY>k+YLxKw1cZjlo5(1VVd~ZBO zq`HXpW*vwa)NE1QL6%YMVHTpp=E9$e_y}vEu}?YkStsiom=&$Afuv-5QjO=}=E=mP$r`r`Hr z)y?NY!LBwK80r>T9z(sS)^$7k=2^no_DS|RNX_37cgpF3C@?TxTlHYwipQun#wXH-(m0W~klZeFQd zk^72)j$(uXN+u+G(HU4@#Ks%k(Sklg1#-&}%en)#;(Cm!q1zc!)m9x@8SkR=G)Ym8 zYI`#}p>UpaZ}0xkeIZ}|i7$fj&N)Q}#&=QQe|4#guH+P~AlX|wbPDQZ{CB=@Kr0WW zZ@<&)>QsW1@7R}pJ#`I;p}f0pA)ElwoQxm+P@Bqf;wp;MWeFa8m(GKRC3rZ=Yz~GR zOA5mniOi@uJ{>m(Q>n;Z-kTG0PuV7W>6T;ca9rh5SkT8gOwwBRS4FI{bYg@;aZU4HxE8aV1E#YF?uU5={|8&y1#{tm z-4iF9PQ_?>2t6*bhph2HvqCf3FA`$>QFE?p+QeI5CwMz=YuP(w+;;4@g((2Gz<=Mp zKCQ0VTg>`wLycW=j`wU8cgLg~)wNUKS9ZCz4x`^nI@)O`d5!u-JL+%nOnJyn{ASrt zH7+24Ad`O*-HNrZ#y=t zs%Z}Th6KB{d}UGXe7 z0k#vr3hKq*DTyK@&xlVzl9{7JjNIG8l6%5=Q_MqjHddsTKr$#506zVYN`zG@64&G< z{RjnpYoIGPJy20Od&2tfAwM)*?io(D=2=Uz2 zGE7a-;8tjC7iYxx>4G78WGc0_dc7ZT#a$R5p`KoQnGSz9`>8;e_(ad!i74ZyYb;S7 zWa))Cq+)z_gZDVLyfZi4!@GB*Q|u$o+gqMzOvNq=gNQJS1=n?=cbn>uuz@a#aX?{Q}M zp0N`bGm&ivXp1ise(G*`Z$>}2NO_ieI;^|+< zCng>fixb}|Ic(Y0L<`Dt9o5EN84i-^>uc)pH=5M(s{o@k_dweXrs`-3N#8;Tye!{L zP3e2DslRYJOAmcv!+nO0U<(ypK?AL(AewxIIU#cIKYnb6CDKWrl(+@7?)$_-L4v`J zDLXzja~<)ZuQhRc@4v-^3IWYTeLMS*GO`6e*2-N%ePY*C*^gR$sbGgNGX|7z!APLzEFVW0L1Czv&1v?!x+!>zu7m6k;{ z3yoP8)#e`JITbi=NuVT!kUkmxJ2v;p)@);Ps2SpA!c^4kvcv&j3ITL{ohRp6K$v^AL}-EZK7$h@j2ZQU zvgF;e25OJ#lQnCT!r&LvE?E&T`dH=D1;ySDf^vF{THP*_L$nUUt(*>n?zZ62f{E#x z)Ea7ne0b6OLF1v9P4e_R3ubY4_db0vx$>t&BG+k-SWx`eZ$y+`w`P8`Fye5|!t*j5 zBiE#&m7E(!mzcLepyl21#pEfEL*cT%8c(@FnYE9nMo{OX;5VoTcs2zf12kUdeV#njzL5stHO| 
zv}O4{5o{~)-}zV8Gwd$!A(dx&-mhEoo|XLp;Q2H|(?VQ>!&-nd^4I2GJi2`UuI4W| zlZSn1b9;hkj@5i*x12zMwb~fWMQxdG9FTXetxxZLkoj$&T+LT~iq{Keen=y^;wifJ z-wK*rj?p!MBWra};g$!t3oJv9v7hz%N9;-{ivLu#ydeY9jf-@*HHa@6%8m9(#ps^E z%i0=39e9_y&S;wA_NcDg5mzLA9Q?AfwxYNgU^%SVC>Ln9z+2fSZCJojE>(!`XO45v zONKx6+aIt_QHXD+zSFWjV0~HdC6LTi_9=+d<3Z!Zi9q&e=HwO?pr*Y{vrjoK-N1~E z<^!Lh~yfE;Nkur<1nJ9nWjBlk{T_~4zXP~MVT zy_gl#%bo5DlDCw*T4Pl|%hH+(HNN-9I#wmk7&*8$j?P6Wu-= zc=?#vW|n^7Gp$vjo$jB{AwXv-Mt9Cu8oStQO57@SwsXqt zb+*Ur+n-C)1!eM>GB@`1TFRiwV&*{BHDnfjebNE}t3Y1n($)_gcp|56_Qkwc4LcmX zMn?F}1QJmN{-@3ctoK!(d&VIZi}bP|F;)g5t#ux5E`Ak;2;sP~DYR9%jP_0QLI3^` z5)gEYy_;^T>1XXh3dNET&I+=srXg!CZB>I_& zBWsF-(E3`R`MN7@n+zQLMpyEEiYZVRetZbe9aaeGm|-(2-KEDj$?LJIxfyIgeYy%A zL{$q%MI9%xQEYnA8$l5J-SRzp`u2JLfBr&(q*n6{_Dqm)u?f_aFu=>q{vCM6S%ee) zdCOvyyr3g!6VBpXAuTL@@#>K>Mg!QU0s`_L7IkB&e&`#rcV{Gl)IgUC%;s*Zx{z{Y zWle&A{T(6=d*&XM`v#Du6WTR~qn3z-D7HA1vv>`NKlU57PD3NTB1ucVMQ`!0%nKjT z!Y>kf6cGO^@0M)!WW*0NIyyvKDM>Of>S7mpfxL$qsPqjs<6o?fUR@v9IgjQ%v6PEY z6Fn?w=I1OpXEgueV%=807x@0~5>!P=7XG%Cb$S=y;f}M|Tn35H?ed#CyHlnrb$u3m zB~>#R{kp2(O*CDA5^yv#&8KpF#X9wKeu{nHC7#>dB$0V+YTEvsEd0w*>SfO{bHHcA z+H=&y^Pj(6NwUoV;3vg(T9PZ?Wl;X9EGc9agZzt*r(zm!<=%0jc+#f=T>3ERzz-kl ztrXqsUm%Ada3hpd@K9rPJF#jrS)xja@n=ipYwp(u%Pm|`QC}BB^Ar`cTo8E&*fRUg zdmJ{+1RH-XBf(t_nB%Q5XrAGh#(!m-o!tO@H;i^jJbpD(t&+PO*e3MkFBG>lC-z+C z=cohbiowc;F0E*aF*@p17Nqg4H;+IRm|A(=<{5Ush7P z-uhic(l!=^z5>V^$rwC;Rd}^RpQ>MEtpqpacX`@<4c5FNoigYTUM_K5+I7qI)G2Tv zBTi}Iv=QuQXgrK=z9W|k9rQSt((e+6x!4^dG69EJB$YOZ#ubBqEdnX7BgONUaE(1PVj&~&HrK$_CCs`J6kkBq-OTz4ZoBhvOjo=az1daBN z2)FS!H!o5< zri%C@aE-{HyZ)!$;NQQPkE_RS!dt&pE@QAmHRZDxZ05P$2f=>D`eRuu?6QXrR?s{y zU1N=aWCFuo6-^&x@@6-7{sUMQR|%5$L+UAKp^6Js+oC-MSIt`qMV$GCvj$Cg;aAY( zm`HNgO|K-o!msRvk{SF*}jh-GYFi8x6MJ->mJ~u1S8)LXpZO_5XPR_EfCyD8k?V1(xnY{;?^gopp$ZoPA8YY zfqJ@ugpS}6Q5~7z-_$z-4J5F6HJ4nk{z>sY);~89xt2b9`G-_70$+3E_8Ga}g!>V# zxrD_W3$x7xwX~O)aBNros`3Tyy8z95- zJwX&|ZFX(_baeqp3)+sD#5p7mg=Vagq$wPg7Sj=>zQk#FsP8gVhQ+cUt&kfQB??)X z67$69by2Zdm; 
z$aU0l-pgfS!BLT$D`FxMv9pWu)oDx9PUAv?SY;Vbz_Q9Q!E>Xbv=3{|1FIokPuGE+(g__J-1u%6rAi>&^4Q^WfJBO@k3$FKl2<# zwY9~PJXx@is)k*?Kwzy&AX#IH%P2ahrb>ij)T&bY$dxOZVO<>Ls;9X`=Y@JELfZuK zpBx1yse{E{BFtv(*EVqO9q;$BiIH*a3{b)zp{{LP_V9wSbgGU14g6X;^T>((sr^K| zMX7RRa32tsW5RGml}6yaoV&L&{N~=T)f$7%FT(lCk9XD?{r5*;%4S(>Z&IvcR%5mt z1NIiZ9XQnbKlJM4Wx7sdWp6{*+qsfqQfrN51UL|W2HI)>3K6(6xG-2k$hTZ`LhP~I z4@#zML*vJ8y2|h1)QQi!R~iBFx^H>lM5F@GM0BMh&!SbI_e%y3enFZk*gZ@mO9dMU z2>aA4QeFY);94?p6zeA`oPBG+$;7iZonR4_B)m|D(j+4OvQ$zSZ z0RZ$Y+at}%5wGr?1Ii5LY%7?kk|u#S0ZV8~O@tB4qOkryo}F!DQlATJ6|eZX%s?Gg zd(+E4vmj@M^vwM4pzAd`w!_0h<`LWoWuW5^#TuGc+&V3w?Y@D2-+&|Dr!G!i_VD)Y2uJ}^Z- zs!39a_x{BAYpP)$AD5=4Rgg|Dt-w5Ua>#5q*Vfb@a8;Bq?Qpk0ox1R<%zHJdnv;{0Y>s39E$euvr26Yk-1LeBvk^r^ zT^`w+ZqevD2?kcKdm(=__S4&^hMz-#=b3}C@M_=D>*V#;zUIaFy+)@KFFxFq`!Cz` zY3cD1mx5!Tt_Eah&rRjeO%)uOKQfgSkm6_4U=(V|QNQ(ITm;(~1m&_Ei$wL2x$>oY zvI{#?t!LcffMtD0zyzb7fv!( z9q;{UVPerhGFUK;NePsi4E0j?*$fWU)>wHFCYGMDfJfhX=%k{ms|}4l5CqZ48X(-S z`K6f^+$~BvY05{#1goI)|8EepE0m*~Jq_vvNF4D)YRmE|(dPbW?mzEZ2($5HWE%@v z^sLd!ZWeCFGX56JvmJVxiI1&uj;@Qf|Ip&&>(K$%i(0r%HoyMHb&cF?c}EW6H|$@$ zn7uN>ZeB$+K~;*PKVh@TmEW|}XDIufCPa$7k$3R)0wsPJS1MZi7feXTV5|6ky zt3fsDa}eC~ofmnsrYlCiXc&cX;U?A(v0csAq461L;GhZ!R4J|Lk@)S~N4LP>sOWjjx89+hn zg_MPE1-6VNj&$Wr$&XJvS^4`0q5T#5jjp&6Xd_mnade>Miy%Hp#cI^y&{?9W>N%g5 zq&Fz1^0!)%G0lLtTX+g;yI}6jtlg#b5vG*l%1dG9e%f8=81iAzez^6h$ne9z_Jf*R z-ce{&*y9Joj?17sm17weHWsEA9#!4Lx{z5aPNP&$?`7emMd;Za`EJQ}*4`{C={H`! 
zxae=zg+%QK16HklS>KLUK-JZWJ~jxJ_DHiDRkzUF)KyB-E9p6KCm*rkvv`7S$nyC< z(0R2lgmGN*dA97%@dw7}qz{+jItuOP7t%*7HFrJfroPmi&lnr|?$GHT;-T+&kG>1I zk6$fH`t+S>&dpn1fU47 ziZRX%Kmu$2$t+qLTb>spoh)jx%qD7gNm`~maoe*QD<^hV-X%y!YEaH=XD_HI&C(MLg3ZdxnUQ^< zN$m_P|9rUmGH5Qlek)r#j;IRK15orR04?^koV9g^&FxSy&ib#ObdcrK(d_fdc;Qa?oUIJWiOXk1Bi&q9t)|XZ7j&Hm3>c(pM<%ic;CK#u&{9-Z|-_?99 zy8nZyWBd!3cVng5)xP!ksTa@+?;Jkj{A3zJ`Uz0(xqO{cKk5!|vQ`isUMzC>T(irr zWBTCdAk+?@*4-l?wR@lvp zWo}qLMov)LZ`i-m)?iWEv9gnp$G9jcUPe!xcbla+!;Nb?!^?AvY)pt$XYO$1WeKe> zBDZ>@0e)mUEiuHlh(@ z8yu8x={;Zof282ua|XKiyB79u3w_ezR=SXJ-Q}{Hvi*@*L$zI*w?p0)+jTJ|KB?3< zXJQO%-J;tdWq)lX>M(JnZOnDT=Q8}T>(??PQ9Cuo%doYC4XxQve?^xRaJHo$xcI(1 zqF4O|%oM4uw$ICS`>wHxgEZ{toir?i4&R@u ztroLixkql|NEWDTEbAAE;!w(i9tOKBTs2);&(})TN^QM)?c`Z5mW?HeWkt|ik9aW^ zwu3M6jDm&X4%e~y36g=0v}+TfEI5&mmAIPJEdF#00iVw=Tde*w;9#;n0m-c*g#-Vr@+xY zmIJ(HZOvxu|Jv`nI#<`|C`wJ19$(cRBISyJApR_NxixjdJIzb!ViJ(l3p3PS_~5I= zICcNtL7gPI7XMApdr!Z=;6Fxdh>>7&4ixy)4mj<*!}M5CvJ=!kWVhtliV;1lB28*Y zjh<{q$?sdqSJ5cv1 z=)^^%=o(0Y=W)$>mo^w2?O&hlTaZrjvPU05o>HE)`5`rha9cG6Pj&*vQ$rH_@LMG# zT*~hpr48R%)I|F1LLD>EL3k}QB2&AFrS_4VNl22&kt%L@b3JWG@paD64>FVK z;b_AC^qSWHqF+lk!17+bl{8T|u@P6g5tbF|rP=d5xHju-n-(6pvs=7c2$`~l$rIr! zw_X`Icft4Lv)v-AoPX&ymi@iMBV%IgN}X#}yd9xS z{dK@N5`BC&$q37R7@p>~%4|3S?s$6EvoY1QOG87P{;cy;p=IXnp)t)gPhj7j}Kl@(&E<|azA0}F0k?D!J7 z)$0Id#Y1?U7hop$pU-^*R1)FW2X&2^h;jNqqXuSf0IEjLZjq`R!&RZ;bhNdWBM(Nv zyNgRAer28ihJ6;H8iafjmR6k4^JKnV4Nfeyv?)UbGSJ-^!U4&BW?h%9JS*_Y{Roe? zS3q4mZ(MH6Ziy+drY*Qg1gf~yMqLXpdA}ep z3Ex;F1}2U4*UTlaQ0D8LV&5#X)x)t%-t9|Pb)G2<+y;(LVJZkLjSTAuXFJoYlP1Fm z8EQDwEC*^`lwc*8(!#8JNt2?150Sz?bxapC9Ro3T4A}b*1kHpxu~Y~?*6*! zfgRM_M+EMas5iit>7lsB_Ln{o*H4sL$kLZjQ-! 
zaOGPC>T2>qr7Lr%FksktW%1^=X}IMCVzFo)gf(Kc!NTg+w<7@kafoXGU`q###rf?2 z!gBtc$EUL6*vxB$(yt4vI!BW8gTQN?a4Zh|8-`cL4bB+hdVqVXDwe}!bH&o|$JMD< z>eO(YD2XJOzX}}~jcm;nSOw(T96adr43NLKFu9&iLU5(WTZ~xaDt?2ja{K_9UI$c8ETp>8zw=d zBmK;-_*zN+L6yNQK}Qm>z%Pm6+aq_G>k(fRD7#Mmit-OZ{E|je zo$$E%(?t!zDk9BtUDWho$3lX~s78VP$sE#(u~IN9N{xVgr zwrSh2Tq^i%1f5g-nE#9fu$Xylc>@A>_M=#8;7WGOB&Epx&TTBZci|4q2g`=8B!sUA zSVSA-PffdcZG;wmhH&877r%>m|FFs27bezA5&yLT9-C`-;X1MZd#)Q;wbGPHk+gX@ zN9C9QUD6s;ZFNYGxXwnTqb047Y0a-KO_uL1^&XPY8%a4~Gx4Ms7adlE0KelhkRmI+ zm_$i$ajbL7zx!^{Ya)MNK|t1Kv@Wp|jzVm3##&7S8N7`P!c@>;Hen^9M1ByA zM)dB!N_cTTFwiHQeF~j_;pQR()LclID?%C9NWggmpo%b|K*1kEU2G9MisHJR_juu~+B7c2?rBCvA!3&mn-Uid>= z(s*I_>smleB-0OpDriXdq$N*)F?GLVLxfn7gwH!~fq+@cQwpthH#GFG!+j$HRuw^l zJ%W9z2!(8M>!0t3VL|~zwbWQzF(#gSwrqKtj3LnUTE$kr@#ovSo%4AcQayLPF;4@AdopLxc|ya_@cLbI$vm=RD7ml_r#fg)grY z%)NlhGnc?&@$=h@-g^qOIS5SutVGt%X?wck&=O@|QB)6l;hvWl7cp z*Mf&{FxkQvXrKB3gD%YtC_^0P@m2%c7XI~ep=4@4)xg<0W%WV5?8{wT?51f3*$($| z?rgV_ncP`zdBcj39x-C3CKo)DT31ky`pdsItjsaQI_fic9+fy^$gw} zVk|(X7f)V|M%QIK&CFo+6xRblb|K%Z?imL+-8iB)MTUT#!KG6+(?kK%+MM8<66%Ri zsLTwIDMmDwde-R#=+5s`9m;I(T^E-gyU|x7_wBeaj zwz)&bu2khTy;%?sHReN9NMd;{Q z$B_#U!4se2G)hTG*ZVR8>=r1nACXzGDJ=KC8Gj|M%Jj2Nd+jQr zZmQUf9Yk@r2u2XkaL8?}y%}mt&pnb5I^C(4UC&_Q*lcL7I~#?3T?|`}Zf_dQsL}gp zLPt#FqZaBUI>Z7p)A9A4S0|mS?WejQjG4ODyxfmFkNes%R~u7WJ?XP@E{W1M<`0*L zeTXdXKIV@NDXKa9k6A?(N#&s8>lLk1To7yTh`HC_Bqgt+D*qWb_s`53bb@tqAXZs= z5K=l6)p4&cM(_6u4o12^)!_K$@Dk#MpDK6rnaep|CW)5lX?)aP^Op^tv<~PQQkhee zTiE_Xec!rnmYaQhde+Q`zx)hW6$y18~u_y)koP&5B!SdWBYVvSQOx+ zDL+ktc)L^oT2t55?gLZDu3R^nBbdiz7`#$PUUA-05!xB>taR0;u6uJzZFEcT&Y0{r z#Ir9vxwu!>o^f3Sz1y7rETrFOr#&l^lHRA1AdnKMMv7Tq%f&lk#0cw}ln<}L)hf_w z5r~M-BQ>>@K1cj?Up4}Uh~%URJ;p1gWVit$5Piz}Bgp$@R@@f3ZyStnL&5)eXf$U6 z3tQ1@d{KYQB6J20k?D4oK7+}MY&+O*p}-oVVAvc<_f%l*zoGXTG?5oa7V>`V>f-yK zfJrv0T^3RzTV?~J`?p$MW?Kn?COEcu4k~Wm--oz_pygT*?~j|z1o55 z89>#a%4_IQG#^guIrGCD?BHT+;AX40z=&y1~IPJw6zrc=-B@kY;e&B&B^CjGjo&5l@J@d5}(^*;{FkCMU3RVO-Kyf*0V 
zjvNTcTXZ>}R_q<-{()(OU)cTro^^Dx>JjDcI-6YH7f?VTneSEp+Z4hmyU;@XhKR+K z zJIm>p-Lg>DTsdjz&yHoZKc1V$>;Lcj@Gmi8+cNOwKCfm(`{*i3|Q4XM#n z27`MsaY(i5-66`DY`MC6;bVH1_JmrK@(tJB#ln#`0miq&_XoVtR%(0+v25jn9t0eG z)N6g=5$2MXmt-q|)Y0)cRn`md|29JXLcKV&cYYyqj(P60!QT<#?#Lnw3OzyJAT3WtP6wmuZfp7wUo&tcU%k195BIuh5VRIBv z7T?*d6jDi{ZD}^TTOQ)Y(D{g0nustG7SM&@G|d@zM)6z%BD-fZjMM# zjdU?+r5qtfx8C|tpZ#4u!PQ~6^1i_q(DJ?Re=GI)sf1Qbtq$3}W4HLN%|sVfzdhvqWCKQF7I>4XAy)>PzgvNL$BE9MDEt)Kv;% z*MLvF9N%cg7#NX|x6?3p0DMVZ$JEHs(kL-B{kf_%y>$&U!Pe9yz4%_#|64n6pV#xBXZ?j57&o>(FZF zooFzoM;paV8!>(s6n_L+({LqGmDgKVAv4ka6lO+bdyZ#?{9FAlGAbOu#2ri@{hUBpPH1fLkMt7kcUcy{dOTQd_xsIP#NGhS6SuPmWEt8$x_#Af zb(!*9jV`8b5dP2>`3vdyXeC)whg9Q*$6;!UGPtYdDjBV}s>QMi z=*I8e*f9>We<@Q&R$Nx>CtoF!Jy0>J_LGa|)@>-^*%fB$%={F3s}SF=>fg~r9j&ls z$IT%8TQ=XzG;-4qJ|uDMM5)4ZP-{iy^JXgdDn`DPv_n7M=xha8aSxXrmdjpM4GncG z?r@$eOl707!VBa|!%4@w(cn%6d5+`x1){D3}X$4_Q;1=bPOfCJ%{O9X06}oyU2> z9e?hxOE!7Nzyn)$vA(Qc0%~w3S%>A04pi@%KaW=r48=L7&1EEDc_Ra*k=9^X$T=zR z9+vbQQHW{3N*JU>JPsWpvsf8P&WYVA1o-KW9(D8nx_fpi@SIcMD^@%uE&qayf?(X zj`25K9h7S&4r;&RG$`)dghaZn&!u*}VqI=jVWjuX`}O(WuK@jF?{~G1-|O5Dw~`#r z#5a>y>cI@-f1Ca+&=1moqrHJ>1Rm0J*_NwlF*wA(eGHQvou>6IP@hO$fxiiCL`NQ9O*k)@a(zyU;&CoP_$P( za&r;Hrt^|Wmo}uyfM0|c{q;j=r%-tY75flaBHTUUlOCDHjFw)XR*XmPtKc3=A{33f ziyCl5I%Zdhi<_JLwShSEI|X@avbOu5a1xburU}mtu|gL_ zW%V3?XgCM*;!qq0%hrKg>{NMh#8~bmo3Pn3z`Ix|4{L|z7ZxsN|AdV-uQ=>>=9;|B z1dxX`NL~;F2uEQ)cB`=qnj3`IX9$Uy%~w{Arwit3Z8Qbaqp`HM(o^e^tHQJpL`Q>h zwsOGr{$<1IO`B498&c(3dQ8>~G-^OcbsjFY1eCRipJi_?jp%(3DiiK}Cjc#P&7TCb zSYo~RiaoWR={kQ~I_rfUFZAqGU|{HqeHKZTtGW0A-)AuX^+dN)0^LrRv5nex@v~`y zEd0dh0H<6|8!B%Ftx#z>uv3=tq~lh=pNWN@uAbQ~ZSn~PQ36Z}ci8DnoayGR?V8=P z${+r?!LlH4+`X9@aKi?ZvTF`1kzJR0kJigP$)Xte+?@e87aY|cu!JZWcS<(odS!dc zclrToZ3fYpg2LY;P{|h;wRfHBs$2~tp#1_rfurNajt1{she!}*$#Xe?au$`CYd`wW@aHD8oxulgOuv zr~pz*r#6!&*Uazj@+i3c7bsSeomDzfSTY@IkJ+V^+a(am;%0wc)LY&BV+LxDoFZvq zP9X9ppiIq^u1CAHOx`qGQ}zL5=ioXuV72eYmxxaTo4=%(@ecM-&a9&fY7VD7U=vwu 
zhJ~6K9HQU6Guc>_VcZ3<kNtEm^}Z&lLl83(^g?1zJIOLvZ#}=r+$@uDI)PCJTo$|M*4Q`0`h{-@CRZ%j(!f5ZcQcp_bU{37*V)(3*;uE6$`SK^l&DRW z9I%UMgB!rajh#6T*AvfW@QiKRA&QP@l{V6JPve-el%;CJRIpqj5KpgOpC4c(nVgOK zlGu|(Lsv+WP?Y?`X-yo+ZMpuX2RcKD8R0LwS2pmp@Lt=gnvc*{R>)DlX69fT?s9za z<9Qp!oxEh}C-ukTJN2nF#ZSp10Iyw^q2_qv#H63?8%PZ)_cnL{k*h5;1!uq?_`QEL zvFw+~#5U$}jLD*=>4>exB%o!z#_W$mdm|QU^S9eH)gTUp2e*M3fp8wHy#2hk0p@{@ z2q9t{q~DwCV&d=*h|_2IvpBb~#C0muA=RMP;~jxN0EfU}ygz05NdbqPHhg*xbmxdH9F6PT@-n2R+53eVsylRn+&l3FXXHh z8=7_tskLaqBd}?rhh_1DIe2gv&;Y9Zp))d^n>b>jnrm`*yjKdcIRO*kd`YWFCL@DJ zrIk>_8Ym2<=bq|lI~e}Q_HFsw!9XZs2ShS0HOgM)ZI%y{ku~zblqYu=5UR;Ct3V*olZSQrvQ*wE@_fe!@q=Oqu_Zw4 zZ^y6q=OrZrhNO3sBXA^jMTVC~g4DpjB5=7g0U&zbSk?e|+KS1AG;{GoUm%X#*gkR+i)fAflJAov^VuUoip zu?=U|gKp~SvfB9iwKy^&x$D4w*?c5u0pU;6YS)%d!nAqI4A8}&KYt$*45~i}VJ1P& z9Sv`~e+tyHnFq1==V3@ZlM_4dpCRqU69D6cQon?3ic!So`xNL`@_+~n=0Tn6cydjF zxC(3r36+Y^v3$g4JM0QjeJV=Rc&!dUg^p5_Y{?{w>`^ClH2_w!ymIiFinNDP7C5-c zYWp?Mp4q-eo0a&TkppBVSffy;hm!A?=gqc+*!*xNa0@Li5FahJ;6u|yVVMpU;|+Ro6=r4>6n9yX{z)vL-Wy6Wycch585nc` z{S}cP@DD0)i{)qAv8iX(x-CcWW;oWwyA_*AcIU9Ktfzl$__>oU?7jFG=zf41F_9^;)V zUG8&6>38!`YS6$}0abI7of9*;7d*%kHcW((78_do`26MNv`w!sXxc@PCq#t&urORu z>7jPx`?jH>Mop!qq+n%7N9EmW;UULB7y#inLbfIMD--d@RY3rg^)=+vBcP@4L7oS@zr;ZD97opQ zUNfz>FV9=G=oAurF~6QWuuvS*M=#Si@|`*FFUSbbaJjCbh|;}%e^27Blkso~H2sps zL4)FxL641YJ{+>(ll==Y@V0P?^tv>tH#|JhiQ4^lBg(IVQ%Ir+l%t_}P2B#)GqCnM z4c!*THRd(MB2vq;E8-Xj|65+xOmnXF(N7OZQSv~f`9_Jm@~_||+e(L~%5RVyhk(Aa z;}8VSqu)jLz8jxVOf88g8R(rn%kw|tYnr{|I%cv;W7=*as}Duk3-6crl`aQOEr$g^ zv9!3EE5zw9YoIUn-(Hil$W{8K4<(;||HuHFa*?PkEDU|SXg}#KR*aLPN_>*r??L73 zR<}m}970mP;5VUJ2>1`gO^E_(=4Ro2(^brXV;lJ~yqObO66W}r^aI^$uRPEUFq#cJ zR+a;L8U5AI!yV!NL47P<`V=+eSp@m`5H2=yeFZMEtY%Z8wF3tYR%aBn9eHfQJgcyF z@BkNjvaTEXhf-C8#4~FcKU1nnu@yefonno&(^w|3I`=$~BRI=n#dPhFMtmITMOIX( z_ctrDFTr=}T(xzBzPrN!2}X!_q?9~WT`yAtW9M+~U?hI{LzOO2lzbe3k+I`J-nW-d zqtLzX=-zW8Q|eD1;mdUBD{*p|P$KqZx6legNLKz>0XVt9OO+$FS6MMt10i 
z@}0g9Z+EmbdpA5fwQEnuj!4+?Cw*711=CI|Q@MimV!%KItMSQiLfW);3OA5JTkg7CIqv4Yu>&*ezMnKeq|0PwIX~#gvAlOE`^5LTq9MiG89g0JB_|IKXzwcT9jd1+T0Z>s%{!#lyR?MKcdb#O-n7 zsoiXFI@D!jmVI~oX)1HDJL<5fJ_Wo%?%sKhq<}ak&Q~+I0&tH`M{%#tsh&t?Xo;;l z^%P}LaN~_RTLG@#!wMX{fAZkkDC|zdW7!Kd3o?uni-BZZn1{N)1}Fe|Fs@n|ehfp3 zjc@=a$D#0u_nb_NcY#up_U|FRH{awH1(HXr)Cd7feD=#q0nsX=Syl-@2oUy@%3Dj; z>dygRi}?%aWaXR%YvQ0s)c7KK^o&47Rs!9#D4YX}X;?r~JTYA%2xxrLmW7rRE;d-3td@+N}U$$Zt0c& zs&0M-SHJzz8-Gh3PHao1Htj?RVa6-YvtNqvir5@uYfg1+9?U}o+kVo;45TfA-tSK~ zvgge|K>&+o46PJOd?T)u-#G3*(3xt2+agP^()c|EPIwn_pt2dv)3?oRPt6Jq4~db? z1jkjq8|=s?6f6GfeUF+?80j+da?`TrOh%W+B+&8O#>RpC6^9rL{F++f9h|c4T-fTN zzC3HM`J>0Vqmi-jp;?yL{dHgp=8LP_`pb`3p1UG`nS;hc$YO;axH`HUsa@3qeHt(v zptaNI^hz<4ZH^s<0D_dHDF}@aleY|(ZWUib3oaWCnG}W1Jj_hlC?x&ya9DXqeYxRD z(>u5=xL%0!yJC!{=96wWSxvKvFGktu+nllq^wWQv?norWA0m!dURRD? zlKMWnp9j%*aR@Pbd$_qm%C+-HF!i)wq?8@hZ)-st7}4# zveF{J$uW~b41@2?+_Rd=)-R5bn04IQLj*zt6uA3UvEVZYMCLqyWF^oe(1riaevTVR zy)Up&xq7Wax_9r>>rL%1+9%z6@|5_YJ~FXpYZy7ywOXT-vH$cQsA(5L*BI1fb0UGd z!_#9lFDn_?E9=t86Xt98FVoAxNmCXm40AmF6T!q(YOnyMK|DeVGc9c*{Ga?>CJYTz z{)o>q0}Xc+6F(?3=&vyHKdx5y?)h}>-C2BB`qbL+$e)$jQ-Tqh@$gzN4@NHG+CWDE zSVYP$A;t*Z4%(Aah{?iJ@bebVqp=W*5ob!=+%>dLH?sx6TK6BQ3(4K`ucE-cSe8S#`b)#h zwx4h7*n-?CwdY#5uBwDUGl;18OJxiC0p3Bmlb81oMA+uI{kQ2~U-v!S9YHU&{uMr< z?!EN>LXu(A)o%3AlkZlUAu{HOVYu&W{kL@kRWcvSwf+@DjqtXy_FIgLK+V4zFWPq3 z{1a35(+&Lc5AVIn-=%3US)m+&(6TM=0Q{A(xmqzw^JtRfVz%TuM=nltk=fcXC#qI$ zKk%6?Giim7N>o(1j!$qSUVwrw_EvR_uWBn0R$kb&=$xMh8r81}7g4!$$JL0Kq`FJ` zWO+uO{<4&~nAUdtb&1yyr@y_gl}?i&Pp8k6Uv24@*Cto+Ok&zxsu`9f;BJ z4)b@Zs;}}horOZR>luz|F1BhH1|GIZyMEv3pK@(M7Ah25trWkV z*K@L?7^UY2h4)%FTr2Br{M1^=yY&>M`^mdYg0`zvtu6tBRAIXDg{61L0I73~R#8H( zP+m6R>P}C5XHWk!yOYAeL@S$5Ji}HaVpW^-g66!_q7Oc7;G&|+O8APu`n>UUL;uN1 z%_%C{PafDyB5or6(K(-a1l?-M*04Mqf~?U&()}Ec-@8GwmYXTRcH&z}fs$diuJW^K z5>@3{-qZHMM2wi+?xek0J~~P5aOtnmDD6yUO020#N14?tcV`vKPEMzjb?nJtFr6CR zQ%)EQIp^M0;ue7xSMrw(9aIA6iuVwsJLsWEsW5nkO;7~Bxy@?m9TONEIwZdUtH-a+ zpetP=uhH+VH&!qSq9mp3>l*@s)en5)zYzx9U-`~`QZ`=u0&m_LPNq#LmfrwX3fqxB 
z|CoW^;Vs~_-7N9h>x)$$&8<#ocHz2qjcY4 zBB*%YVuXIjnJZ;@?yj42jU=xxQN1$e*z*>%<(6eIkL<%%OrY0ctKMiOo!i0XR2ncO{~%?11>0cikO&~8%ntHDmSZ-SVQQ`~#;rma^u~(hRB`%}v^gWe+DHleLl?K`F;;8w} z@GcB*l4vB|qo}}xK)`x;HfS6oO}V@fFFFlxRQICSq7n7$;*N3Xu9{2JakVHwU6)o( zPu*AtxcDGmYHM_*fCYFQyL=ngH)_*@b&(6qhQZ@DYYK#UK-xdnp9gyl^wOT4x-700)GCI*zVR0i zQ#Ak0S?FiC>)V%7OBbxtOOnZPP1DPc!yo$EZoNyXjCa4pSdIF##oxG=GZ`eG;9?pi zUy7y=4Gj&~7qZeLUmxII6GoP}0pseO3+etz`qkJ?At@63cCmZL-3H22qe%M;lX>{8 zD#_FI3wLbZrkefSoHK77BuI1tO_{GK9zy_lPBIUS=ke-bPeVtC;FL1~dxkf5ps~;H zR5w*j3S5faMGEg7A3rgfJyl555Z2ON{Ixo9yN9J}IcGEqIuHuL&;Ze9JUz||2mx3O z_ma@MLvvrhqn=(BW|uIV*Os1?QgGxN==p0vw-@HBA`LDN7*a>-6qCwt)L-x7kaa+{GpRm(ppZH4+#lqk9za-% z7AU#E^4^6`EE>m_nxGm)KTGBU-`3IC3w;qEEorr&qe34Dy4})?qX~{#I1O%EUkJn$ z(kcio7f^R-qs7xEOI$(7Svm`Dh(v8n$Or$j4&1M zPo`ZzKCxT)8z97}OPvP%A1aO(3vTyIGE_p89x$1XdZi_OOE240JcAu*Ya{M?uzkxM(BTge*8HgVuGs{lSgVd*FvC&7=DsQjb9LNq!Pq%&7AARq#anCI+zw3tUfnvh@TDN>PvZIuL20PyVxHs_8y7T zvXywSBsAeh)H-`$pl~+6Jm^ki?Z1&!et9B*W)*M+>?@QG3;AH}umw9D1vI+Du(k6Dk#Qg$c=HPQKMi(4C$foPC`=mH* z#O2m|chF^W{EFiNg^uH$b?is@hu$QxOvSi2%(rD@?OGZV843k{o zXL4}d1$M#%ycM+g(;(K!TYz+124jw^%*bdc(*M+WEmg;}t^V7zC2$Jy8RljG6PfOo zl-#ugNbRdfo(hYv-TOAl!{)^a1AtTLZvqK&Ajx~|CGJ*NWfX9A#LI~QNpyYrhCpBt znw%X0h<-vD@B3L9DW&Cjy2tjLaw zmQ^CAEF$UrqT}^scvWVwivP$UmQywPXDcfz{B+FJWr1w57*-04#A_k{+a&SQ!O@r^ zg=+O3E6@>35Nq)GwrpGzj@93<$xQKV5s z)cjDTfMk<9D1Ov;VUsBFBg*Vi!^*W(v>04Xp8AH&RBDl)bnf?N4A)wbxBw|kqEz3i zcUSqt3I-t9q)rK?O}v4mqZjDs*ZMH?fF`n04=C-_%>`i@^}u@}a01Q*Cy>xp2gWZ` zm{aPA!y?SuxKywbzVW5gnCJJ)tU}|T`_&^qeY7fHQrx#8_30?qipT)9Yk$JC5S7J9 z)aI#-11sl}5y1x#{HZ2$uL)+h`Jfu*y9u5PzHOv{brvXBJb7oXe;vk*BdUW4Weq~Tl zA2QYNUv%L4Ib6!zfE)`0ZKKfl>Nm74VW{; z5pv#{Q~y?4vM> zNMkcrce-m$bcyiZ!c^WYjIw)YEA#5vi;m!Yw) zOiNi)W1VQ=q=?2Fmx>{z#SqIQ)Bmg~gT1`IIx?Jz(H*~Z<7fnG+2q93@6(Rs` zf~nNUI7#JI_C7}M@!!NC1X5~z32nt#$OMa8oCR~33AM$H5P;#0QSDlK`uR;O9l+;gn|-~*dE5sg`}5XLXvniq;Z zi&umL;qXD-%~j?9Do&lZJJ=;8Mx96<_vW0ML9 zEJA?WPh!9O6jl9xVC_l~b-4F05APch*72S~-5?OK*Yg*ZcRoglKk+1lDI7AJ$+w&@ 
z(oc0IjHD>pT!cmdJhAP;+IcO>DjC0KAyGKF5Sm}yh*H#C6|rE5P z5RD)@=wr%4*R29`p~{v-n`dBm!5$kG2xD{>E5J5ZZc_u`08QDd3si3z9}1TkC_Z=<>C_x+NqN3YC* zEKaXkz@7mqc`qu=ylW5_#bS33k2s0S-SWBlp@PNu?_~vme$QQK`=q0vT$*LB=29WzG&&Yo~Pl zLimP@Qy^HDxAL5v#zs$L{pVQRvC;81te3BsmskE>K`MdEyqQ1hH2N(v%C<8t-#gUY z%&Z66M5{58s&7ax_-L9BMf~vnt=04+<)vFVlh12oNM>Be8C2A|{ytnEVz++D3bC`C ziP`I@6fv3f+qZuF9yye(JpiQj#_>do(kI`#lP(zmaK zhXJvyRvM+)NYOlWXCf%QuhXlp`eLfwublcvZqYH?*^a3<>pl3yqSJN(tupU??(58J zgyGN^z-6>Js?~b?XzP^bty3kH-{$HtSm)+Lw*qct&_9f3djriuS@B};H$eV+Lo!yD zkG@fC0`&Y&B%?84K%0H{m{wBv5_hyS;(PLhv>64kTv*m6&G`@as*eHnAS4Zp1gayT zi&By-hCKZ*wB^4|_lVIW(5~j)uI?c&9XQ204vgm(PZQ(x$yJv29I^{{f1|m=J-7wn zO0903%8va0bznG2kfr54|I>*U0@#g?(bAg*J`EQnk)#`I@X9C{c3<5Jrj%z#KHI$&-oY~9xk@tZS%GC_toR8*(!VQ-g%)?IxD$e+5rOAow}HLY95%A7tsO-a>Rj{ z4mqi^3A~jC*5YW}j0SNznqzSqZV{?*c@!+2@EoCNnBj{NTEp^iA5KeI+#Ab4{QT{P z*EO)PS{27mKcw#L(u^%+SzqEPcV;QO^sN(>SYwDp>7iVZD8C&O5JQ9JwI;eM&a{IL zXYM!%_wZ!#JZda_IyeUMp^fqw*rgUm=~a2ftU^QbK(;b0QCOFW^9B|sp>|F`M4-!L96mX1l&%?G;cqV$VapLRf1qpXC_ z+($P^+Q))GH-jwgXp@g0P>OD?WClvpG2LZ&t7HgD#p>a#<%ixyqdDCYoDh8A$a-cNF2A zF_nQ;opNUhSc6L)4yF`vd?KX<@;NYW|+Kp?|UvP^#`e$n>{70AO>qVKPbIIKNBGjUBapLMVy?*Z@CL8;50)nwL;rrHKDUM zzLgBV7KV+ty8qZWS~}XZTo(Oq7uKlfOHVO-O3S<;uKHNJ&sfcSU2!8Gmptn3rdl5? 
z_a{8|1viBZ^9*tGlV$F$ClMq@c`+duPIo}9dFnus`za=tQyWzmM`gR7b2#}fYgm4( z_TsHjXKy>)YK6ETh>9nM9vI|&!+Sf&{FWyimmG##6rnq)C$_mE3_;%TF|r>jAw77s zdSArWLuCbY!sw+|@#hTVy~EAhNidbW0lI%nePHoJ(t-)o^YK*0B?x^!9)Lz-ztlY2naB_3oBh1Z;N)RKU z*oRGevm5No$>rmr>L7Cm4)7R2sus8v$q`qPvDs1&KC=}1EdZe$psd6|RuksGefgYM z8k2jK=Gf-mBXg8j`|Cci3*q}+yNM{7dDK2rj#;)|t#`0V^E3^+cHh+X*iSWySFgmo zy{N^Ox{rjEj)x^z5jVO;5L3!sU!OUY5+$}C7#Kjr+MT?7XS~)++>BmDW+g$C^`d3% zFoHhn(P=Q1jih0^fk8tfs!d9D1al?`&ZQm`aXDobMHPkx3rZxc%%iLLjmW22RuFaV zeIliiE5fsHq>X&)nI&qDE=BVAjNzo9Q5Z3Q7ZYPmnOed%p+%_ZhsKpbV02FG#S%rH z-sn9SU|g#m%9c?))?o zVFW!weMQY5$=_~m!<1Zbtot{|H8PTWKX9FT4<=uvHk9*nQ^K~yqeU!5F=M>PQ>FuXcg&B?vctqYNF>c`{#4 z0QjSbcI-BJnHHL7=}!-L!7r&C%aNSxY@%b=8^_68k_ke1;AH3wV(X$DVl)hR$*S6* zMpa+oZh^>|qTEVU`z9p1_shjaC>_rz6k5-0Jem739QzCb8VEnKrj8FZcPgZB)IJ$f zZdbj!a#~jKZu{X>H=9@FPx##-a%LR04!z=&-Gwvdp`LnO%uM~~4=$P|xza7QLtBL< z_Q0IvRB`H^8`&D2Bkk4dNL{72iOa)#vvs$}wgUBI+#X?yqhanp+$40tPEN0`yk56_ zw7(88ujWteRVgMH1#D4dV}D)dyK0}>v+S9AS*qAyFpAw%WHR31(yeKo?O7|APXqii zu!BR_rNyL*x)9hpbhF2&HAaT)fM(!rNNa@0D5ET^aZp!f={T#13qrkGiqluARf$tb z0-YFlHg&CyfqmL+RHLZ}fO?lplq6jn(C&+=mj7w!o}ujbNW4AVH&CcW`*rbw0czVi`}WGIi`OEWNN{kx1CZq$A<(Sp2;k#X!ndb{iKC)AM{6`b{-tY zUvEkeD|EB6w=(bKC>lh(Fj+O**(1QoNt?pNd}+CR+`qyRmp(_&=($dsK4{s^Un)+( z;YQ(CUS9nGPXAXGi*~n*xbvAl`a#_bPRH|yX@rpxBHaaXH)I@Zs$toC+~WHzW#*&V z_#mxrefxq%Lw5M9!tlT`OE+$mI~VbkufNr&b|d8m8=+l@$nw6gjaqVH?1O2uF3EmVycntu1=N28gWTHQ4mLu`m+WF=Dp5>dJrAl9<082?)=2?N%_lXOj%jg%J_&oC1Vhx)4;m+ddtaWx6_kcZFBmz;dEEx8Uv z|A%5s+vN7Y|Ee8(vSrZHzHIRqL-MweyNlUJhTRW8lx)R*pFSF;(%&I|hWCc((@RX! 
z?Q%?F9yS(dh2Q8e^?%r-c!hhfY&b1jr(q!DR)*DXySyOZ!wEo$m72m(O2$Gk@fWJ#IL})j|JQjxncb z-;%qsT5PdHw(=->hFVaI_>1eh^)aV6$wCohKsA^k8uoA!(oHjuUrCY9Ovjp4dyiWgP0 zn?ge)JMgi7C46XaNdAADt`4DQ)qHV3$iE;nn=3jRLPK)K~1l zDhYK3>gAxojIJK<*+OLyJ6GjlDty@YW`I{XF~Sts)a$fEaoIa6$(5vmpiAgQ zeP!C8foZ8Fs%>UsSYLL%-Qz=j8PxHb7xy5&RLWas%D@tVT1Z*&xvMvTO$8`- zHxEyTZBS?r%tB&hrUwk+`GMRxo1RzKHCefkLAQJJ?NG4{)f*m98fGc>M)aCf#@STn zg4uG{aLnYM+Xtbw6@$bLQv{PX@a&%DODhzpDrXgF=++`5tGR*#=`K(7B#>YYec?3z z1$RSaikD`aQ6gX^cUx)0_U1dMJP>Shnmg_55OqS<{^CL}Gz444iE(bi;vbJu=6M#l zH58Dh()l2bv>4)@H({Q)taw>7G5}zfzdb4Y9UcPxTUrBE))Wa4T=U$3U3UF`&`GKo zfWOSz-vjv+tH|y5T*sHacWkvR2}NR9g!k-^%TDFv2R>;a9tNyMkc9yIqF8;BE70%` zuh^jT0KHR5@~Im%#)+43TU?D5=vn60gl;lJ#G7jHRSKw$(~3t;fRalEfOfauC0%en zEI8;5IE6aR74dx0FO`yXq&Tre`)0qKb@Gst*%q(*{EJ~Z&K%2_HAEHWV7|O1?Vu!d4~`t>794F6=9w}C{j%x^I9O!N#M1l2d09*4VlQppM+ zminhK0TlLzs@XjGn7`}_zwbvR??QhGlKs9)DWKb*vG`PBec*!H7l80KH`mFa-lI0T zkFY79J$Zkx&B`d20O$dx;-?Xl-R@26(9J^zsU!HG(nZN4*Uqp>WhV$pk zoW3#txy_eNyav7G*#!u%$0=>X ze*u<)veyt_7P2Z5|62b4o~B=l{9$k%+A^jcjfuBAdiSLX2N=0Zh=l zVeR&vR@^FH_q%#A`6m8mdbNS&|8aEXaZO!K+t#+!1!J|SAdps2T0toyA_&Q6ts*L< z;=(FXQ4tU#BC;m8ZXgImq=;;ZmL;-AWC_S}l`SYjlmHO|LhMxs0>i_p)Y0UT>t%HMOZtPPLQNS#?6UI;x~H>0aD7`?FwmTnBsmiEC?vQtxMGl!+k&%! 
zqT{KyKwN0bch%Kqih~0vS$VTzyM7?51x_S8^M_{kEC5KZp|W_wGkWMd11v*a)s()c15Ki#97S9UZaXxw51%gJ!EOds|ysZ5mzNM~G+WglHFHz?X zblDxFWK28Cd&eqPcFna5DJhZ}a6dVZ4)-W-b;=oyDMTyq*!@3w_w})M zb^a~Bt`8#m_(C8?;00nSWv!3(`y&AP(T+Uks@bx|NN(Z4M7 zsX3Q4YNP4M=oQfCp)BKY7N_C|(h`@aRuMb<-ldmcrZxp(iJD#-2qll4E%z)sZ1YmM zG!%-c%o%pw_RHhfEnxxXIG;{uQ}Xwsk9@3Ke62^COBVv+KXd?xZCYc2tFHd@fg8Y`wFGiJdp`e?*yC5Oqpo*RFq2O3$JiWi^Nv=cC6jN zU?x~^{2MK~5*v(5mi`~|8{66%x_}yzcKVwuoi%3BYs8xu=JW$zqklqdg3vVbWZ7i+yqDkGhp^J~mZ;ox3jb+rxWby$Q2aXcRx|5?G>Th|NU~;IFM9t@>0M_7%i~ zF3Bywo{9FG*8x#I{(>@zPc;egJa=U)M(&7bOCLJ)$O(+ z@Xyg3+XRP@cUXxnmaC)G7PDwU>dM=t%n57MY|iQY^2?ZC zYAAyfJby8qw7bzcpta?<>V=qq2{)AI2RUlL;w6Y|CUHK*fYSrD4NVP&{b7|m`;WiLk$W7ou=Dwq+`0?2vFm6nF@w+xdvi^hW+OL1 z4|9UU&6w7kk*PsN4K~{s24B;niL6gM@dDob3oQz(>&=iXZy#K>$W``naLT(OK71oz`2#~SFpH5<=i9UeT%QtL1#c$OhEG|>ckPu{eDQF-@rNB%j~Ho z9*d*ax)y>(-i#S_1VYL@G(l)oV`E@l0GHu&CF3j@&-zV@R0D8_+6~0#{sR4G>=S+l zD=hZp?|XS6F**7V{k@&57KB1JX5h@^XB6iqVj(duoZqG%4bDa7lX<;1Jh&%1mh* z(COYYzRhdY)M3MZ0cinoo@N4&!n>5_k$+f2<;cgTBhB`9^G0UPV%D(c-`JZLs^bvY zr!NicpulDQqNfYQ3gBlQ0cF~Xyc@YGGBC99*!WW2j>1I*C#SaT?IgkOC}V-dcxh})EkdYN$xtc?+{Mbw$m2=akpIIhXM`16>tB(33-b&_}_sAZMJ zJ)4iZg*CGB$NjpaX@z9@J2Cmb++v%dByK$aVh#OX?4|#z4!?oMQ-5kT&cr5|{vyWS z!)#DeJMpWUf_)=V^nd}zO@8nLk{eR^co zau@8Yvwg>(0C+2`<$nC-Dnm@vtH{#Q4{xh*NiY1|si$JxBlbvFZ})NX?3Xq$?yA8q z+0e7%it(!-p$b%*i{FGp06{ek0Zn^gw#Q3a1{jlo9n&C3zl+2)yYRiU4{pW|FtYk= zG{H(Hp#>Ene@5lIyO#6T zMBk9M4>uV(r@p&t_-lOw+&|B6+>o;-A8;sM%Ey11V7ddFi&8+2#U-WsSl17)>7T6? 
zHoDBv4o0F=5Z`Sjas5HDuHs*KO_N!v52$IqJ0e4_@4#$Prm^&*l55ej(F3h5=>A+s z*G8ifJ_B+4_d63pOS-nMR-&vG?4qbzfiNl zfRdkkP;#rz)2ahMr9CF`{bro5-_PWfX=Z}%fP4JiX=?*82v`60I-(C&`Ht5BX`L_2 zV!>S8D5+Kk&6#L+_{5{dgSp`P7&ea$yf4?cJ>KV}Hy)P}5)moxe(m@g%l$#243Cp+ zoN)OF_8D&oUTs)2YHQ>n8UpQ6k0#6FS<_#sb2qA29knEY)xamyY{SC?&jD^#uWd{` ziK(L&27L-RIe?u27tt?7`;(P=uf~iUB!7v)gjYECvm%_^78PS2b$W2aOCnW`9{vYV z1G~`-Z{2Mp0$^FY;T&7QVEC(RMpx^K$r|B zuO1J_&8p^u&pFsK%KaeI(LMGgfD9sA(Pk`s{H4{#Hk2Vo}5KT!% z=6>iGu$=Ek+&T;kM|ZP(J)6i7sDLy`7vpmEmDnL?8Pzl&{SOOy{~FN3~jB< zw&eF~i!l(M{?y$rYINGXq!SyXBtrt9we7PF>p+fF4hlju6@^l--9AzJ+u}%9KpF;$ zpjxN|v&5L0|)TFTkaq&bI0;6KEBP_olX%yWVe?GjeZ=m<+}^!YK+^L8MZ( zj+%jP2r+zUqGh~6p4c0A{=>sPBEe;6U51#pX_ zn%y*7C;$~v9rd#|azVX#?e{uBt;@n*+s}Fgp`|>A&%?{TgV$}Sll#JNisb6^4Yc1D z7&m15el-k*FVz0t+#i}`0uJd-!N7tF``)_HM)VUVpI0wj7<~2)A9Z|6MT&Y1#5yRv zBpH_ccR*@-TDz;6j1N0SZG`uC=?;Cke_RF%^T(})`P!CCPtv&|S!wR7#pqcS9JXwV z*ttEmqtVqoZhbe?i?5E<=}m^8e#$s?Ml(5sFc4>Tpj5zBNy6LX0(K$+*QKbh5&`d zCl-AA{*T&pJ;sb6OT*koPsBl10VAumX%D0RmwwV?*QeAnDh3PR#5r6uxYTf`b1q2= zv2kq-0>UuqaHH|IEm(JNg=c6Wi$;_2@6dloKf6#p0-JThjli_>1S$o{BDbfR>>;+g zNmTLb0O1UWTCrY<@0MCVvdNkU2%yk6Kf>Q^=Mp|QFIEj%A$$2bPj}xF0=ty~#;5>` zlopokY9a-^*}@c3!+_DM7k35sbu^GZ;cb!FZ~{*zcz;%R`FKjC*4rb~^(Z)bg7z3U ziDDPf;*>UaDSiQAV2eDx%?S@($hxRcMB+=3hBgM^<~~om=yYz~hq&hq zp6bOokU06fTX68qmt~7ABu3gFscj}ZEd9{%+H5zMkFyr4!^vz=2na7QVw~WI!1i_| zf~WQYR2({>n%z(iSE;Vq(!h^m-sM*Tt=^wc2p4sK(mHfe7aoipD_JrRKPg;O zEZNj~g5gdb1OuEvS3-Ot<5-IMn51j$1y#<|#gqKx!jsdXZ30(RmOfM6>$D(v{dy*? 
zzVw=_VKyjovaIuGGG^pj?6a~D+@@~hY@xlnw=Ip*6elp17Y1x%x~Z+D+jcEAgB}+_ z8IfP_nf_)h9=4_cRqZucpXx#`oDEOv1e?vlBY!4eLRVbrE^e*TCSc`{ke11B)KH#L zH6iSgu)15760IBA!p9~;Qe~hBu#1&H+~wXo%ox(ZDn1bN0d^6WhV9}fHKCiXV8FIZ znreZ|t74vs+_DJ)z#>pO1r^WKaJ{B$GN@jPjhUfff$n(hNgY`eC)=I6x05Cx>+ov1 zIEL*47NjAa3VZ_*dY`1(<6EfK_+1A$95z-kn3yr`1PCdn{YU)9Tvt7EouTFTK53W4 z#7SSb$3xw)Rt*rHKLHNn_UF=%M*FMZ;bk|oQDIP@3VcJar5T@8*f_qWxzgs1SvtGo zV$bIG7nra0QuU+$QB5Y%3pp~d_P6<<`4SCYn)KgWUqBa4f{DDlM1rape zhxpZ`w;aE7QFQ}JH!V7wQCiSr#2q;kBp!{(g{pO>EK$}O=0{RnOf~9y_wN#V^cDdh`9$U z4)vq@pDJ<+?iulsbnXUl^BERxs-JTkdVTQ9ByN~7p_e2wn$Y~81WEry*c2V%(Udqh zG0pW7qZ1aa^(i>@#Vjs;C#W>-^bbS9CYBoPX+GLMH|5`=LywqMnTzZS7WSYH!bkRc zDsCiTtCeXU>Dg{0dqSf{0G2Ut3yob?V?K4G>KAF}$$$k<*@sf?$)nIm>?h`}@|`=b zh@f$w83o&WsAn<;$S^k97YY!pb*-hay9a2E96yNI{9^z_(r1&uo~L&l8!=u-&u%XxL}pe z6KMq#-wV0{HD?f>-6(XC?buc|HM+~JvjVASBJdl<{2vZ81Tk|)alhw;XSK!}R)0Sw7$0tS+?@E(E{ps}t1USIx#pnliKj_m zkL)Ic@ka9;oQ#@6T?IPx6kb5%pf%VaF#89zQti`ur7c9$h7^x|Luv9M-8P9x3rX;B zuf=OJdEF;Ht>=S?AXB-ITxxV{EF*2QJK|~&y>N_FBYIl(wdo8b7DZxW;BH>giHA6O zjP8?2GjH<-=sZoali9;AYKAJSs}XL~-V2-e_@5t=yeBMf} z!{wUhP`e)_CJPG{@f(XeLLw&a_oB3J)|jDA@>38{&GeG66Q@d{cgC~%O(X0*ty9tX z%79tjAuum}9v+1dOXTKN0P+C#{Y6kAJ3G-VIxl@J^>X^&njQcekMF=bdm+fd9s`0n za_%ZD!q3A#`$AGZ;)>HT14cD@I*JpxXXSDvLdChOY~w@!I9Vv@HNtBlo{z#u+#&;? 
zGDDvEwkh`cvg)8KA)IVL2R~Q*acC^qsP1V=FjZpN2dcwh=a8PrihvHvUhnl@4|i<* zD`PqG6NGw$B-+*Uk?n2)$e)fKOPiS5BQn4%lUYcXz#b&YdE9cPWgB*xc8BfISp4Ze zd1@3h;q}hb^QgOs@xYau@bQSCOMi}%(b0TU^d84>qerx&&z#}ops0BiA~hQOB_N`_ zHSKtt7yhhp0-g-Yf!+gJq@}@KK$e*^n5gms3-6num%5X>IBPMC${L4i>N5;PxJxYceTiQBWf@3Bn~9eTBz$C5#>DGq{@@a-#=oW~hAV^jeJJ9_wCpp3Q^xY)lJG=lKsmolIZpEj z%dPDB81gvr&p5KOm}EEumJ6;JN6l$^A5~q(tf&%?wOn!e#l*#PvQv3-d&@lNMrSvHCjy9k$H9%v1f{lGn7-A`IDMKYk zUttWC`Qk=q4%AM#jrk*C!dhSXq7OZ%bJ;&=C9(pdt4-Duu>Kg--u4>fAm(tvkH)Q( zPzlskX}w$L>oQP_)0_42FDpe>2J>EU5;(oY_3dN7y`TRM+o(BLJFsJ%59TwX1sW`} zwFT%97=0k^nZ=56grqErIkm%oV?y@?hDz%JFl{X9Nhq_Efco4izN}u-ZmA2<9B8E) zIN2+@SxQfMt=$f5WLu7>BJqwq)%T>BRHVx+YT9|xo!lUb1W7YpiRO?5S=kq{!ybzM zptzMhIRI*H@H)9J20e80gn4?$7VP5}2LuPrW9sWDJlb%1HLD)+p04!msVyL=1XN9^ z(iDpl47U6VpsXPBH`=pe)b03-pYZFs(PiTcWUGTI4f6ZKhG+;}1xfU~a@^N_?d?<#Fr{)r)HQjC|En$1NUm0$(X7E;(P(niMdZM)RpxPG`8MJhca-m>f{6)7j;3sk`y}bZ!m}!4aTUWF}cs@Rjyk z>ncJd2VCmNb2)i?0Ep~iby~PGIABB0)~O*q-7aSP4Kw5?kgq>0D*oq(!8TOe0+)UW zjEHBwW|7xE%|JFL{xhkJk(dlm&rVG$r^GvCtd;(=J~=P+jOh1U&?E7CC5Qfl9fKVc zXcZ`%yjfs&e(-vyRYlR7)?t8_%WH-}$8IMd_p*L5BBB_S@7hShCK%Z~YV@yq$x)slH%_X<-}$*RpxiTBP@=fw_Vp~gEmZuF;H zFFaQsYprlMnYeN=I-=a)b{i|)qrk(CZ-Cs=t!=_lmSe|XZhD8)9CAx-yNc`>!;AcW zZx5OEx=ZV#0mJb)ehXklfpY8Hlu=KL={f z0YN)ARCRz1k;1lik&8LV*4*L&aW1f-J`}Sf6!ox^h1y<&d^uvJidYwr?OgqqXY z8x?2uX5^pnM*0VK*YhheuzJmweJGlRh9JrB10Wxk z#m__*iK&X1#z#ki`0}X-F3wGR1c$_jsQMLmDO8$IF0N`=_wb!aJo=~&{2IAghtU%i zTqA=;TX4f`ReQt-^lSDg>J(ztp1MJa4|hh)C96QCNyQe&LS1L0=+A!K7xTVQn4`oBX~wb$oa4EVbwbm)9nOOL^;#Z7KG%vHvF&cCoK zvwjPjh`tixj18G@#W!#&Gmy^(YEEwitN#F>`GUim4GX%gUqNIpw!|sQ**XqRaprmb zu^mKAYuf?#xxSibr0#PBR@P3pb`JU?bC_-m5~QroR+a|al3f&Zu_hm_*$IxeQCaYp zxHq>fx}aU^qIb8+x=3hhA^Gc}n*u!EAK}|Ijl6Xs0x+N#i>cz(Ugu=_9!r9!gzD=8 zp~`c@TDx(x;!cCoRkK#vIF8&daXrT;41euU&qw0^?IpzLIsPc4|TgTJN{h&=i0{`KDwJ1tRS*^V`Fij>&$naLDi z`uh0m%A0(0ijnLgcpvRpalVj4G5;VUe 
zjo$3H7%ZobtH4kvzLP}tudS%_Qt}WwsDH_L(5yPMF6E+igqj}>m~_-jKkc-=-ErjK0@|myQ?%cI?kVQkC=SEYPJmhf2JoA5;k`i-6L@=QFNfU%hV?-<@ceI=g_yyxXU+|7aTYNqH`@zNLn%d-lFLvuJ)rMPU6rux3 zt3m4*F^)qFawD%!utZG( zI0Kaaa-nbMNmVmcah;L-;C4KspVaL>L9t;t?9g126-2xf;pLh_vlA{0Xe#a*tbTQXqgKP+Z_9Tjc9{UHNm! zWFXgBxBb4=H3myli*4R3;*dE`7b44Nxc*<3(X!;+Cr5ToPbSJI&z7@M`Pv?;us)YT z_e1-wj=fYHdI>dE@N_XgG0@$^+pnRAUZpl){_RkhR;&|_B0 zV8KX^I-p-Rncj(gY;awzg-AM& z3^AQ8Gt=Wa!!sFx;k!?^)Cti|Y1V@cw)a2T(-GI~}kQDs>=QcO|D} z>-{$X36c9>!VW?dY}S-FGfNcTxcTJ2MPh@{k6m=LCg7!`OFC!ZnmY^M>Y|W2{!%^` z5AaZf5|bAPjC1>Nl3p8?&gHY)iTX6ODsb^JOIs~eK;56Q{&~lZDAGv{yzJkLZ{F;> ze2%-g#bpb4i9jJ+<^G)7V3=MfGuB9W=>6bPmTV5Za+$vbEwZH|9zzeE?=XZ2vdvs4 zlgREwA|`XI%Oe}0GXy1-)c$Pko(FZm7Tn~DwW?D4wOTHlu3cop>;~|_g3)#y0qUhS zXh4hzRW(BMCRsD;2gnSFMl=57u)qx&*nTg6sgmzUCh z69SUziClzejG4(f@rs-WQN@Di>g%T&(Do}*lrdTrm5X+7mW;?`CB^IqhTq17tFoEC zkSv@FUf5cP;)*hW)q;#4NRY!UHkWH+iM!!{L-Q^euwU=q4a^X<=K~5ZNOkyfg`8jH ziq31qJDXu=)yM%(eTSQA`{t3(b_k6I{+q=gF1OAy+lgPIN?FA&pJEitliwcATqvpx z+9Zt=GCXxq`oRE|IZV698P7u=)c{O4DDYPy$&~h(YP}!=)v5f3HW1bJukq zev$L1@Nm7#sTV8)7WSj?0`JAcqUmiteXi67^bmL>DD6uShM75ii;V3L^8u*dhXQi- zB)QMg>!)^pg(M$Y(8igJ2$?~k^I@rLZRqH`OBYbTuGmacf}F)&We7S4{+Z_uvm>gU z7VJgn#1p*bdU8EZsh{sli0!(Mtl|?8sH^nBAz>?2E{$K<2{u(an860ef!NNrLSkVY zQr!A>Mk*TN37cT0QZQF9VpU?Z3U~R367n$wGh`GD8<2Q|1!)7DOD${$)CxSoZ`hNp3SUx`3iJ>q!6N-}YVU^FlV=Un%5)q6L?;i?PMT(au~4 z{7J1cZag-{- z&EC}AsY%Sb*&99Qfc_M{JS~&b=akWvrkAazPA%M|P>Y)){;bn{_~X!dQMfV=94HfX zQsO|3kl$k*C{D;5?glme}#=}(*8?i|)OYy*atHrg1Tj>J@4 zWE@(RHQbDy;d7sAFE3$u-RhBEyw~*PZzE@*^1ob7;6B?a$t1NlP~|Q_CHc5 zW;9wF-v+nAb;aIEXlKK=N)tQt8$(D1DE0Visc!Fop*zOOlR7@cI?TfIL3QR&ERlN5 zBU0Bx$3aKB0z|*Ij02_T3ex!|JEp-cMrwv$MX9G>V1rT1IwZb;9SbMt_*{cjH z06tiayI&4??epd>{iMyqL(y%Y`k@fCL^DC(phZc{eUOT003SGsUSp?kq47NG80ztG zaN5w!4Iw;^5rvZ_{0ic74Mth^I_FZu1U3P07zry6Vo>pfT2RR*I31OIclpuF66N<2 zi?-N`X}TD#pzg^w{iX3eX^oQSHFyPM-cOVX)8A*gxpq*B`^r{buNn~9D*pSn&OLAEu=e`q ziv9asaBSSS%KuO({0JBS((P%;9$Xu*J7_+hW<8s=@rTR{h9~yHs$EKxBPgtB$ns){z{Vt-RnR^_&BHT z`G?5n@3@;HoqsvAXU(&HyrBOiZXD^+urR^JK 
zzSy*&|EVr#5QO`CcGh(eGus(6cv|ybkTD29;VHKf0|e-1^vVA7q<aLojDBif+xxj4z9b61r5n!>pL^6+Q$tOCO{YO51$Sm;fYA2UzsE}A7%p-z~GsAx$ zp)}OV$=@deX;fdTF&kjmkeAZm7y^<9AN^?VI-i@Y+k~tIgWEB>qDV1(05|4Z8vi$Y z0bzk#0@?gNGPgH^gJ7-HS0N{2C;)rIyml+FIO*{r>l?eDg09o4Jx0+(7ozrI{hxp^38@D>d((4C=3Jz)8+_iYb9X!T zM@zpfGf3{iiaB@HbYux$?AMjO_QUS9Ot!_VB$?yaV6Ef%mRg`~x4u90`kpr8qaZ;TNp!{2-w{BfXN?P;ylV}gKiBMKP?OKScR8C$^*#qVee={>q~DT z4dfH81PRW65c?t$l(6gFPRv2WZHUEp9ND?T8MvjENC_a8juA#XAtI6qim#7CF37@q zNPhw{{@VCmIZH`cf_3uJalQUp)IDUQGH&Z6^)nLLQpLAUrmnNz0WC%x>bIqeJfKM% z9vVB|Yr3n)#w*{scJV3<(N3 zM+;J@iK%%v28T)1K=hF4DdUsw5ewvuNjC+^HtHg9&j%ebVE4s$fZs}5#Hj+OVg+jG zh-O`=zIhO4zvj7nG|QCFHbzb5Ov(xR605W-<`kmocD@z^8iiE|<1Z!Sx0=Bg`#FGKfo5<6Kx%~CKg#0fWP(cB4Q$8<>~Z5BYy z=O)}vXwWY2BU8__923$%LHa!Jv0(I&nxODh6lVw0qH)O9Otn6Ek!VSb1Qvx_wB@QzOgUb3 zj6GXVSI`wjNcUZENWAhSSY23)3;5tNI#nJ_Xq8nr%J|Ol5OdnxS4nOMagz;mGfcU5@v) zY!ytS!N zCc5iCV;IpAcgrV(W4#D$-rl!k{yo!90tQD*teQC*`Q$Ujyo!26f=Mb+-ago*aeA4M zSm`x9`_jKCJTNjaCQ!jW`Wndwr2W9aXHa{WKy$*mOiKN^3ELK(@j8aH;&l0<_H;m^ z*GPe#TgQ|`-Fh8eFD2#aZ@s(jhoJ6twqDJE6K0$AMYb8ZW0UqW?F0pir99e0c{+H zkrVV&?co$?O-)11Q{97|PV35-NB1f3;~BZ5&XK#84M*Zvtxwj8m4V`n*YL`U0CL-n zzFDLG2S7A4y82{7zc6Es+kyhCTXo}G0No)y0FmDXObY7*ErXF} zT|G3!9g3{2t_D_5w#Lorb_d1cod9iEu;sus(LD^MLSpDSpU~gd2i}QD_Pa=#sEQ{{ zUBZP0{X3gm~`R|#oK_3p!Phe0*p8mUq-5vqsui0@^ZjO=5#=&^_q?m z=K3U-1Zmq*{t)SIr7Q5ILqyM2$pgBORp7?0KW6p(Bk1YY61^vC<7T!Wa=}|S{ojqx zm?k8K<{GK}2H<l@!gyo(=WwG_~yGc%1nI;aVL+qvo1x@n=bvM$suQwTj*FKg91P zz7q*#G4If=5VmR2gj;}UtrFjQyaxmjeSf|PFOkRe4}FdeRrOXpgGMY4(fNtUDtpNk zVuK84!TMHqzdnn`D_q26?eA}PhEDes7lAWw^udxbPQiKWT(o9UOC$p&UZLCf-3)1S z|L)5D{cm(^Ws;_?`9Gor*8i#LDS6q*? 
zLRW#18!FlC^+Ah?M;Alw<`1r2A)|G{jtD)xEQGXU;D>5H>L=mYFA_pZ z?W-kgT{9oGDTG5Ylem{(b*SoZI#nKGlot65RK1%ur~7U3aAnGlpwkLqJy{kzw@|B7 ze}lRggmTNSXZ8@we;-8kM-fhVFFpwvJjCY+fzXtpf_CPx=0Y^{1i-iTEaXc}hDmiE zdtdzPy$C*wqfPO=aDJ+)&8G;1k8d<8j0g0{215b$eK?a8)8puxegoV=@;Fc^ThHKo ziDhZ4V>Yp#0(v3ydFigV8hAir3@uIaIm^?iy4XtpV2UdB2&LCP@soc~^&YJ&XbFB< z2AGwgyp0n2taaR85`&E*%c(%lnAy+S6aR_()^LMUU~{K)ggUS`$Tul>fw@KXxE*4v z)oFW6zb=8>T^hOy!gI9C&yMb8JvP&7q!bDkQm}cRy&YpYVveb91R-2;Q`DjBZd5V!f3Ce1@ znooaLn!$^?4?YYa0yQy9reKA$H$<*F8OmM%F5}Cxt!n51yEZ_S?Xy=AL-Oi{F#7>S ztC5wGt(gRSb)J0d)}1`SPtI9eRW?}9LieS5vspgsm=cgup(+AU2bOhpTDeB#glKRHX5B+MBwG7Cl7f^E#88puSOsd zs1UDbeY>T5NYZX;J)bIFNNWINJmIu9hCr38yMd|K*-)oW4(`S`Ftf-{UjYdQ(3!yf zPn65V^sUE3E&K@(05#txRn_TqQ%O$7ebrM>A9UBz%HW6y^>2AbJPQyY6p-#-JIOVMl>#G449Q zC^AFv*%ve=FeApKe@>lQ=#-`5RG}I=z>=62uus)$XpS_uBocw*#+O}ttbnk)=NyLc za^}CmkKA|PJHt^jq)Fb3D>{)gU=a0GD6iMD4)A7iv@QI@=~b>y^?c?OmqM&Y3|1XW zon8jAZ@@Q-d>q+}q2dL9T_o&&+jq2k{^J|3^kcbw&SMe{M2Xc5z9@}>pm&$20eT|k zGC1gWGQPp7+l#=op=d3(OC9-avVJ&sZ5X5qFsA4<*FR|jXq6A|XI7}^FB)EJd}L7f z#v*!B!IY|X&cxcE^8fuK#*DcDQ`w6%EWTg)HOw>uVj2ZtX?{8vc5#H~@WlDXB@r3P z2~-DixT<~)7TbobxGp>(l7lngh4XemVy!%FJ-U%~?E1pO@QbGQe2`}eQ*JQ%X&<$2 zyN=$^Ms+2^y0|<@{$-i*qGSKyBDB1mbk;OF0|-^1q@Z5?lvkAkRcI5693N`GlFb{g zZQ`5Vk(vYAyeW?!{P6Jc=vU-katc{SVsac7hKtkw5~kea0~5zkb-^KB`OF2UmI#1u z_$_Jo4PHtIu;OpseY=u`DgjKgVON52`a@}dK!X8D4j-7P(_&meQRw!9IAR2XtyuqO zUQN;>xFaw$2PPz?)P6-J&0qT_YVO34n1~l2=2NJ;_>Y=H-v;io5p<`aYXVL4CS|h_ z=Jy^WvqRV$jV^%3FQ6$E1=n1r<}#4S_h3OHV;XXj&EX~>>#oQDbJXO;73QOjOAlSw z#C};;B3H*oB9XzHkc;0;N}aL2DAjcFNw;o5sE>}hMrvj`JH}lLKQx(I_;$s2*{1cT zLoE8Z@?^#)&4>mBh3wO__NT@G8ZBe%&@&;jg6TfThCosB5X46QZwB0^Q_^x}KT#DrY?!)0Oz|43rP46-Wz2Gjx8`ytWpoLpH zBY2#Tv7}S$If`sHZ zOGU{uTihgNF#+kL3eaLP*>@7;nRid&10PlC{v$-FBEW*s5X{lqHEr>M6mXP#PjM|R z0sjVA+-P2tn*HmkG3sn5HtLNI(m}_)dv@&}O2vw3+NK*!lLi5gv#D(HPm!sul+?n* zBhRtN*dd9jyd@wX0nYbF3Qz;oLXOYeu>`f|-oF=rhwx%e>ZLy$_dwje5+*|jMag8P z*2dScIaEtvAbXkf`b?~Q1SQ7V^88WHi+GBwIZInJsiRVwnCP%3*=BHa`jt*DP+$~z1do+|7u2kgH;p+WTJo|Mvpmsnl4O>kA-x 
zRRF$RH*8|dVLRz?KF$kOuzXSTZVHgNbtkCFzd!xA7@8uh-)vb>frqf9ny59aA8p_* zxG=K@1yni%49Qj4B_A7gdgG{yCuwL54BVS@Wz6Zyyc2a7re4k>keo^XRyNX`e-83_ z19$8GpQP`OYwCR8wzaLb7IEQ1NEH8Tl$I0s@i)3OJAN#-ttZd3HA={|AQZiD)102Z5|xACv>|dgn){6cDgw`2>Hm^iaBL zd;t7Sp6k$DR9@AvHewy#KZvVo;*Ih<;^vnFKvGQy`{$rfn|l7sD`j{oiIGEU{cpuK zrY|{`x4zWa3^i!MZ7^$c#dN~s?0o;dQ{v~nmQz`tuh|a20mbZN97`(wHbMCdTdXFD?L9$QMX_PnkARkSA_ody2=Jn)=M>_gA&*@9Y1r;1-aJ`~!%J zyuql#DQNC2pipjemX|3lC9ed(r;)_@xm87onKI8V1bsRhd=H>h&;(6#wtp%Y;v9^z zq(qK0*J(kg0Wjm$QAd{5A2-Il1-iWZ9Y{Dr~PI84&Y$;or8Ay~(^9~W9xOVO{@BPtAF*A$q7jg$|HK2By?RR9HlvT>lwm)i&IZ&vw9kA7% zdD!ZCTm?6-$^*A{R^L?eBiUDCNHZtR3o|C;QG=5l5j3fl&&QrrTilMKP`2p?l1+A} zq26}Q+3=xkft6n9ng89{nc{Pve>PP&BBq(h)@rt@k|C1b0>~)%GbAiL5r)wjuzQ}1 z*|pxMf%~KVLV0~J_d=#3bkcs~&8XX8%?Zes<~Du2dh(Sm@JSLO5`@hRGJW>gsA>1& zr!Y}2Yj`*-Vd3i%oJy#jHYM#L1~aw^R3A?*shC2^K8?O{Bx0bkQ0BGY$UsDEkRO!~ z81_32aYMLI-n%O*mO#@Y?1ZFQh`SQXN8?X^1oRm}&%h|o6!YGz(PMz6iw(BvXBfzH7p(9XLe!j#>q@fB*_(YC55R8x2}Hg)K3T$$)J#r!vaM->82e=I)o9L4l`$(V=ovR8BqTop&Ncp|4Q zMbj((Tk$`o)ZtylD)dK6gCnTb5=yCm2YkRdx3!*Lh*_sJ6odT5wzlFvsFHXn<}%2= zW7?tzr0AB`DD5~zdxEqsU?l`>(8qrQ(&5gP*dAzb6>jJsl|oDuPKc0=!(edEhf31q zf1(dDlk>waE68odZ=A=s1TR zyVMO#mc6EV%B=6+S>d+CzrL#Do7{GB-|4p(1B1%~V0l>%b4W2H2;X=mVs025Exp{- z+ytHvAaespTs0RM<6;0dl$8Kh-n`7KnJ+(DNgwsS%2Nd{lccf*=0KDhH>>U$nkH1g z8aeW9AN>o{H}SKVJ6b_gNd$ztf|xDJPO-E6ftbRhL12wZ%w4Cf7|$V`tx_VEp^=3V z6>r?M<)o?^`Z^*`2o2`q#a>tz{@@bWg;(o8y2YOVb?fX<#ucC0M^PJR-5$w9?(-l2 zu5tKhNBT-sUpMCxSA(1Pl3u|wVD*TR>^f!09N`g{?egxb5X8-u1spvKW-lqGTklk!r|xSA3zKJWy~E6 z=05w=(BK6!pIpWJ4r~dx&^-7ObjOAiXLl0U!W_cMAxTX;?$6E5W@*mb3+Us_tq}ne zmI>F9K(SzAGYveejceFHD2Ljmj=28QyHlI8wVQdSqK6E9!& zhZ%F8G}pjh=Ree5n*W7QvtOBU=JlA0kPi0#42BT9L`&nZ-2$Y0wXe$druxM`!TQnk zWrkx*!7|nVUgPqS7&vtVgJrg^aNt!V(2-xwAj=u0lz`b?5(4#|-rVApWtd{Wg2=tN21}mFp6GPW6VZ4Lj-TT)~lr zq!s)9$_(E5L4Uy95jqIh{kOuEMk);>SGdiw$`C!}&3d;~SNld~%|yy2SQqhW6jznz zGvmy7xX}xPgS4+B+~%&o2AFYNf^Qyz2ce*Tkif7v40<=3 zgRa5uY~|DE=<08|U)p0!TT9QkM&x>)cFYKxSLvT*sn8>g?i`QtQnc@ty*&8vfc<0# 
zE^UBEs6gQy${ume83{gqZvMJ#d{3WTJlyIB8zSxCrKoOJ4L|}mz+H;saYIb$R>o7Y zBK7%|FlMY{Rlf!%axWO<1n{*tH+DW7dWm0Ro&MzJT=7mrM;< z98Zz|PvxR!44nE5Wi)83*`Gljj9-Bjf-TkK)X1(wtjDkl^qgwV63bsjpRk-a*4V{t zspbR`45#6k`*w0-eJ?+~Z*JX2o&|m|%Q&%-@I2%BIJE@4#i%w)fX@G&pj-hD0+eq5 zdY;>dj@9=Emhz=#K|WRSC@C|Sq*xgQJn05>!g7u!B7C$uCU{FK?2lU=afz^t*T!56 z{3^09Y>8$1;qQ;GX~<%0)=aRFO%TvbdUwPaY-<{a2&`vkiJsw! zG*Y0iQ~Q&GyR9i*#be@6FD~nLyD=h1bW*YF!9}s7{g~X-g+Ff z`P=DZaLT%rBsycIFayMhM!smrj>U zpb5+S*x((xNLR@}KM@6BxU91*itD#DJUpmaHtr&uM9_uSo#o1bb3Q?Zs8b z)x3x~th9fAWTQs_{l{k97qu@!rVXuX-F<$&Z<#XKzJG~!5BG!JFK@MsCc@vQK<9d) ziIDMbLsw4B_4HM#UAbE<+xx?wkk@F>)fOLjo~_M5eKsbCr>U#)X5@uWlq-*xS7x`^ z^!^*Y)AL=z@s~fhUCD@|wl9z4SoaGiEsf{4pqb-0s-4mF)rI-J+U};LkET_UrUJi#qs-La+RkyZa}#nh}RG0vxPS8Xs*tmF4 z`(1fKuxW5%CQHSyj_SKgQh}wQVGQN~_A3Yjq?x0`ymwQ84Adzbp7iv%3~sl83YN(4 zpdF~gJ`Ha4FC1)PqSA-h(&s5*Q+p+b>aOTW;D-X|w)}%9dJTih9@LCn1y)IwK7P;R zrprDvMi70fM_>)y`sDE2BmfZEsv=lYf-TP9;+X;UGJ*InsC*d^L#$xyxEEZ#+KP!7 zB3qeY)vPIJILqnTZSReJ)R=I2xBgP&ykb4x_dNh&S~2b1_O~~G?X2PibA=cX)NNb2 zaCx?ljo!wg@DtQX-+FpgbjP9$fCv1EF$uV)B|gErG-Gn46BXfwtZ@uS+m+xj>X&Mo za(8i2%r~7x_rcEA(i&1uvsSH?d4wPSg8S`$TH!>%WpUp?IrR@RF{L&QJs=#+5 z;37qXsjH43zCOaFs|BO_*#LWJ-O(TI1iJyv$v-wRO${@OsJLA3k+VCT43TJnk*=Qc zIurHq;3<}5m?DD*!83l(AJpfH(d!|Av1zH~N;a%W2uh z^-q&2#ZF~bx*HAZN|>fX!iqlt`Hg;o^`O9b=Nl6N=AJ6Z%@6@ldGAr_I&aVzXAG31HPyW2B~duiknZ>?Qhb;_2LnbRiC*P zxe?&hV#ALB1-;#aVc-KXWH*0Ec>;x5IDuZkaSD}OY`$ws4=PR6)C6{nAQf_QClb|{ z$kv^6Og);4v#o~MDt=;m2MO*eIkEE;c;^NSp%0^^!hvkLH#5(0u#?Xf2{B4IuPeyg z;qRdIa5cl89>gr1J}@!7aCA_S-!)Rs$+eoJ5NlsVEUe$#$p*dc18J;1magMl;kYCFzF)u7%s+6Lnh8 z9kvKifttVEZ|}@SwugGOM1-jR3RS264A`43+2IKaJ#APPg8@UEBmDf`>9KG)_KGAM zA?7wJfFVToj#qy-fri0rWmdMICbpp3aqW7n$(9eLbLpYKJ2gXf8a7_3jT=V+9`brt~QsA>24E016=y0nuB8a^3dj2ukt8uDyxo?>tou;!~#BUTY| z$eWsDlDdi{98OFrtHC7eB7GIW>K2!-8k3jM{|ArMir6ab4_MY^d}&MoJe_IJGFZ$b zG$}jutUkGFan47>o_+hFL#VK0${xaWj{0n&$()rgSPkIUaD{4e;6zA$O&?trkK+0; zGP!8SxfXRW$kU{c-nIk5aP3wiR+OOxN}u#|p^)>yh!%GBJlyO}hccA{hDzswSx_^1 
z!y(7nD4!XYO4xtQhs;j_wgH9K@ij^qit4_0@!@Z`B9N}g*N#a^bPmBZmG@|0a`x>X z{$7Ud$q>7%B*5VLy>w_6)gB^1447)w?UsAqOF7D0WW8UoRtS~1unW8P02FlbLnI+u zTdG`op6k7B$4}>^R^W`_&I2U|(T?2Bl!k^GjwS~amd_2OhbP@MBCG<7tVIc6mGkd! zDo*PheRv4&qgZ3qf=b=acj0@j32b$)6JnwQnK|c^o4Yfmzzaz60IbG4as`O&xjdIl zL%z;)Im~p90#G|i15I752uEt9&=e0*V>z=WgN?@Y`tqd>6q4Fd#cI0sbvPK3hoJVn z2d9swMLhsh{saYLHJ$KxrZj~k2haFvO31*F_7De3FMpDzfUiA9eUXM#UaBpqHeVfO zKc(6uMnw-h2J0O~c~CqD5zHFVvs%tSyS&K4k%YhHcYlf1rId}>=CtF2)9JF1 zNnebjnqk}BLRLIKxYy5ePHHQVu!T#rp&Xt3%EKtTShVySz`ad=?Z(3t6oua*l;=%_ z>7Wf?k$OyyxSz~}@wU*SB^T!@5uP_a_~EnD%OJ94vH&-EtL)7W`vzZ`fE!WuQ0XM0 z@?&icX~B^uO{Ut*_;=543q8&gYhFky?HDHq^VSnwaggI9-9YCmCi|C}0^I^FqIpVL6ed*Ph)L?W)NX zP&3&$K{Cy+dfTR$T$EkNfH{QP7w9u4fF&~cE{FRWuk+WwlQsb?5ANJ=i;+>-KMhVM!q&o$-aW z?Ph11T1EXPeDiSD$r;^*y=GJE_J_F4)bL zf%*DP8d`!UnqX`-bl1-7g;SWsb!^8F)Jody?db(H-()eGn-20C(^-GOof#5b?DaVW zD)HZb4OFw_L_58r8|Ep;nGYX3KXcEMpH>Z zPa|=4Ee%`8POkULw8RaO*T|AeY&*Ln_}@uAK|M*KwQoNr$_HM=V2RI2x9$@t_ue{H zq^ida8>V32A|}dmf*_GGXRiG{@=G5?DT?`3Oz1C5e;&YHl@`Vi{6^4p|mm>y($bcaxrx5bY;-}b9+1X#F6^#mv zS(>^z;#bxE%=4;r1zi9vv8qik&+NsTeVH}hD&U8g^~LNcpbA!G=5_a@F_K3kL(um=MKBAQQGuY7Kr5i>DhZ2<>qMV_S`VnttKG zQSX&?qyY`xr42lJ98m~I0@t%+)bpAFcj>gPnw$_d$PKnwYkig;Jkrqk>tj&~O>i*6XtVFpT~bR`3+9fc3c7I}BZ zBVLq-EHYe(IVK;dd5jqD7}e1A?^@0pP;`CN{RhaeI7H}4Bfc0XTcx9|eo;X}&x)C< z*|Q(uF*>sxU=RyH`HW~)NYWF{ofbd>*w%>Fi7jo64H5j)eRycnBrK92z}^A39EpUB zAG6i5GZY8fvzZv~FiwlQQkgXF-SDJa%;CGW8S!GvlO6}JM+}r7Cq(OFeBw{^9)14U zh;bL*7?vcJSdN#~6_P0cxcy%Qtxexrj88CoX+|L*1H)L$^sFcIo300?MVh+#c_nI_ zO0aR`WPRIfX0oi%dj)oD4r2#SN;@vTA$xuXs-z zExannyLR|-2&Z&AQ`H+tG>I|4RjXX1ITAsYcbONyJ-P8$r`;0xwP^F9Cs$(#l8xi( zDQ3L66w@}G#6ldLC<6u+>@QgW3DPQQMzUO z8rrwYaA#$Ln9Of~awQ-V0KBWyQ=0XKTZVi;A;EH-YE8u5R(L3joCuEJ9>bSsqS*4> zlIK@r4x@htHftfja)`DuH}h%tXM0K)Jxn1Z>F{fiDhl4*)n=lRnYwFOpVLzMBrvp={a*$;nD*fU)^s0*%#` zmWlEUAd@xL4}OIP=*UVaZUAb--bVz|?wz(8*y@wI)d%e|VQDW*w}i>Ed9SmKyQi;V zf(7+eA_YE$+Hi~QpFQ(E1bF1BE~YmeYc7SdgPI~rg|@d2KXsgX4zo!?tAz;4+^)15 zj}a|0k(I&&J_L~x60`np6_0jLjLJW%s5iQDrbTKsO#87}>{a#R`suCe`*67Si6xwi 
z)hfs(Lw%@cfYT&swhmdP ze4xQATUogWxRrX}^{_Pd=bOdccHsA_oh0*uPGWs5#90obNdL{Yw zTV?*3SLhr4l}Zwkrn#i#jnyPy{}l0%f^)2I>V>YN(eis1w{e5bx)Rd7mgIDg=y&BF zkede$u1T@ebn4H_%;!%*4|wzi@|A?wniROkqaTlK3M1=SCNvtu z<6bxo{(Qe#em`*t&1CxOGVfA1a?Ju5>t@N@h-Qe3#EaF&9@6C-T~n9QDH7j_s)6`t z${Su^h#LWf8lw*&-;=hE;GL1iQ#0z%WsU`Zb*&S9<-(ZI-sqo|zGS$T^9UEW*!A7^ zrY~GD56?dif7J$0aW&k2?;7a)2~Wb0Zd)fJ(r}N)ORh%XO98F>s)T%@ZA@IEWoJM> ztL|*MSRU!Y$Q(9%yG*F<6Q{thSieUq*|TEx(8+R2A(|ZqTyFbcf|Vy@!5q46vM20Hi!~9`a?z z+MuymESeczh<(6Sna4etu;3)kID#$eH6>!jkV8%}*Ir{ivpqf;xiS%)STBAU0HEf8 zdQeX39S=_&_~cxoOq~^9d%d>x#D}@LKRBACojm~)O4fQ0)2_v}8{YJDP1+6y`5t1P zrdb$tQ+&1Knhy*x`=1~UX#DDNe)?1$d*j3m(X--Pu+HT)HyFx1w~xFBdiT1K=-TTi zf84vr>%{?bpwhS1c+-Vd_sq_?@2$K`5~HdA+p}v2uF8AO4)h@?g3Pr%e&>8;b2Srv z@_g|aLkg;@Kc8ZQ7H~B<8O;OeEBjs*Za(;~{>pf4+j3Y)T}>8uZixN{j#JCad zbGL2$8k&_t$#!WQc`H=8Hod?~W|;5R|c>pmJ2?42d^dt`m-XFo_E zx_&L@l$iF$GRrfL=PAa$1}G<_^dl!f3cuQV+H02vTOa7n9)nv)?165w-;G7zV7@XB zI8w=MFnz8|+|bs*((Cgt=H}-E=fMx-8VkB#u&=*XPAN!Y9>kRog8LXm)&|H=7+bLh z?FZo%$m4CN&k~Ba+BS!FWvlrrlt6GS-`!e>ETL?`mINe);nB)(h!=`TBOO>Mdulc+ z`dp|-V@mo>mk{h3g)YUYjcc0--|p)qy~&NKD_t1Ee25N#pw*&fe{Gu!0@Kb=Nuw$# ze1VvWWg@oc?zSg7A{AYfTJ|`??#)Z7&ioD;i?p_r%VZ+g|06ejqxnDL;_AwoUArH2 z+_h}v#p52ZgRF{%`VLv)qAMBev1uD|yM7mHMvhF2?6UTrmLPj&M0c#oZASR{1)I(GV6f zgIEV(pWEh->a&OQCQdSs%mewiaELMZ{=x_|9oeYwA3_6vv1&5-q~5CqxPBBke=FE7 zgxQkrR96MLQLX`;R6g+hBNrTiY_2{Ce-^x7h(&MY6!`yA^m%S^GHu=Tk$G+^m6@*h z#X28<2dS=^Qa)(CJvYpm5?~i@EdkU>QCqG`F?b3uI$G&L01@}H&^_bTu;IuDpG7uP zrS5M#lbo%()~cddcb@Ug6-WM`vRIKJ@rj-a{imZUVq^xc<8AsC*FqnE=} z@>?00)J`S`^tst@p{~y$)j|^vC~N7x3CEq}NU-$#c6w(gc@atVo@l@BgOWwH(x0~I zhZkX#2*mHhDB;yycp6GVBmy45Zx2)AjQ&mY{b%pS;Ie3b@qa76wp}Es9#4Pn{prtr ze0C1#uGYy}MJuJN5^MYZt_+r#{(UAem~+XgjQCMu*Di&k0eHw}kQW1v+qx|_M}BiN zl0a;xbCzbZXwEebDPEk;NE-}FU@d|;tw)cE0)6n!2+U>@`!}SWUP8moQiy(`c@G|0 zZ?;e0aP%E;ynwtj6_eVF`wq<4n8@Ypce4pbI3@=~}YvZSI)&Mxv(AD8H;04MC-ky@`V+OPu z6_kNhx#$et4#P>=7?}0=5an0x5vBXwFw7VHr7-HN`OymcTa5gm-^H$6w)Z?|Au^iu z-wID{l61K6(6t8*=}5F1KJdfZU!mY&X==`Mi@%I~xivK|4ipf{nD*!0+$rq^bQC 
zCJXZ{y{39)?=sLCsX6; z-u`(;kp=r*lN%GSkVODkQplLCK(2QRu(0=K7MvYT08j|rucD}WqN#;Q=s8ENpCa1r zB!aaO;I6^TCQYQ79ya zPJJY+T%J=B=kg6js`kyr3O}pAYm`Ve@pjjU@Zl4o*-KFT1f{s^V=>BC1#$>cw?s1{ z;hUne{?@)IUv)BkVb!)!$InYIr!EJ%cH$nvzYvOc zZJwG`HOOQy&{`GqHV^ccxsZUPmS#OLypab^lHM7DqjF&9ooF3dS*1<`dz1y{jOi{q z^v-!@16X&ZN*~Bt{TU%)-@pciqTOsYQ%qPbOpWA^7QN_szg`I&g6YtYC|}sV`T3#4 zAl)7Xue&H(os;KcH_GI)r*FQ0mWK;<);s?GVaykVDPjs!Aut%Sa&E)A;8i?29VMu? ziPqeu9q-`uBUO%4gQT7>(fpraVrz0#|KPJ>>D>F9U~sI67MpTv1_}o?qhwkk_@E>! z+Ud9*3L87mzH2PRY=5t6354w6dO*rVSTm#Ldz+31(iSt?UP~R9?b?6DR&kSmDAnur zy! zX8}iGolMr z96U7Yeb%gQm*LEE6sXl_zT?f$~W26f;|d9P*aJ2KCdo_jbE7JO==R}0~b2-C0wYm_Vip7D_? zv-9lqZ^}EcLL~LBm14Rs=U3^iltt%lX&jvEyj;Y9`fH0lqE$NGxIYv6oFdm~-MH0_kKpOEoi`HDx>*Aus6WEe>Njl3}pW=UOe5X2_fjMw;xHc9s{?ZHosxA_74`%mI36O`P`))k& ziYHDQwWte*eN}uT`;Kj9%8KjDv%u#pzViUQm1R1*k!4ldU>bIKZ?c zuEd6M725MDPdx!Gs<54KD0r*?=4#Bh1xf&?rY#jpcHU=qpSkdJ)bAx{70B7!yVOg* zr}5;)dS0yLy7=YHd$O7vIVG$2rqVti6+ z_N3p z-J%n8OLo0W1`;J>Gx-->Nuf9BwyKY^FK{#l+cETn^!ne<{dFwP!WwGS=beCaP2Jzq zn`#hkvP;~9e~>I|O?zaPW%!~RkW-xg&{mPDS&$7ZmlFL7j;r zUQ6h7U#cK6xx=Z!3_MOhPJs%pG?JZWy+kp*GE8CPWaho3_cM)1J2=9w4Bz? 
z<(!hd+<2K<+FRk%w+>)=B^zX0s6_L> zJ$E2p?W=04AL1npd(4h{91NN|M^XiVvdK!6niK7-(_}V!3EMhr`_ML8t#-TjIiRNo z*AT%E{<&4dR)UtCT?qK%O)KrWK7Ee{Hl{%4fOIWb^NToXHNwU|7J9v}xartsvg1rXH_P;yS zj8|4@jFctv2NL}K%Sw&rC~}Rd3@i$04*!Gc%l;$Rt{)J0=y$1W#Atq1ep#YjV5#y5 z12aS}NlG=3M;zOl?Eq=+*8F~%?L@udy`{4pvo18VYFj=0!Y-tp*til*&K}#kK)!07 zHMo-qq=H)`1)>DsZCy7iUv82p8O0yK8IBh?cNU^T>JHdVh!wzI5V7b=#%+gf=fqz$V(;;BOE;l-|4|=4!K;HK*p) zklq->6W^vM5O*jmoRt0JxCHM*XvY5$>>_NPC3{fzohABxc!Nl$2U4^SD8u}qgvMwM zF+xGo-^{#RA^Az1nm*BuYac30dyc(Vu5tcA)Ikn6O0<*BoA7U%w~2YN$Po#C!Z2CJ zTnf}47u`Xu{~Zwm4&kGQ+wcV0_i2Pt%RqR#njxplz0;HZ!0jhINEnz5&g%^(I5$H#W^XW{r$K zdq&t#Y?)7c;n8|6aQtN(J(;+U+ELrMqEwzAGo;-vN~zu+^iRH)?eSND$F>x(y$0Ex zRrUx1o4~%}`|X!P=SUK23Hnd0dLV~d1t{I{B6p35j9q$JQO!uU$thlsfWx$EvCFb9QdBL^x211ltPH1W>?|t$h=b7|b<#i^oq$sbhas|e z|G8t&w3$RO6NsOwaHqM!iNxMe5CG}EB;ppzdZMmAr<8xcx}r-sj9?Hq-wki^XxZ~{ zzkCs^-Z9(VZ%K&|fb(zS-=F3$6Fr{B41x~hptVe~18e^0ss-+K?s<%2!sRjj`x0Ey z%f+}%nUGCj=2|KKEowWi0MfkSmZ{s&I7V8@)H7Ks3M!>~P4Qg~gXo=QKLvS^XVsFXkW68{&`avfQ7TJ6yF6xKuW;Pq!MtLtV+LJp4HjP6ri{UKtt3{Ic zvncA{=el*OZD5XmD6eKosN~<_9LTfm%Ft6Uvn8~H{vI?w^|%Y?UZGSV8h&ZuHgZQ^ z4o7ptn>>ZWt=o(?9LPzv*+HGV4N|`8IW{MZoDfH#;ItCZ1xo_P@hUSs&2$)bpRt^p zH34n|Hw|hYor1vHX9rfs30Evsn5OD?IOPpEj)a)xE8G0OVKRiLmkxG~dSf|!8JjEx zC(i{P(?-XZ5Ag*8+A4_4l?rnAUp%ucV{QzVwOFZYdMJ)6wtxD{+t8xE;}js+SQk%u zejYhV_|l6tP^&dfPUF%)L0iP)&Qko(g}b3+Qiv{7hZT>f5NBK>9v2Z(Z79}!mFwU- z!DPD$_RTakdLZ_Un?dYh`cH3R z?-8^dC}*#m)W5iCJ$;0W5v^C;LTup&(Q}%9GJ-;=qoL|s2g}7{PuqcOx1^9!rvTg_ ztocGo!z2k$u$sr<9+S4{&T!Yg z8P#{a3)#JSb zhiy;8m{QR@BzrEoaCPQd3X}E0SGH-tK|E=M&I<-gpqNz9p^Mnu{BJlQ)yX-@qMuwA z!!GpB<`1jST4d^Wo=^x4y$gBF&`+02Mo#$9F8hIz2T&klhHyqX;6Dmw6wT}o1S^B_ zojFHKjLl}=K=*2gkw2)Tf4^qn_`||UWSMcvt|=A_o1gYDzcRiik}PF6n{W(y zOjlRaZ~eZ8GIrw{^~W?DX{?%{fBbbu%SDTy#SVZ`%C0f{>aILG_F#e`*k>MrQt_Go zGAk$N*hNZ%mm=Cpq~aIyQzOb~a~h=ia+)gIXP9eEM2k~?2%LYt2+#9)1P55%x3P0M zhuqEj=BrcOoq6Oy{!wYG(W4X-$0~7H;&s0Ub1u+6nJqg*D!}%y_*6mz~P7GKhmX%GCWd z<6X14=U1-x6{8UQME`1{hT@-M9eMT%k8vr#kyZoPdTxR!_}S4vLPTtXVY{V5&%;hr 
z4bS2`QH44?(r{)jDqJ>CieqC@kvAO(P1>p@naJ%N_a^->>f#01UOVBw779${_m z`x`EagvlZFA2sei(%t7onp^^wIipSuaK8S~v1y0)B8dZytGEH%wg^5!;Yu~;WitQZ zj>~6$4Q?)tEkyS_g&ND6-pAKTWYr^q%;>F_7Bx4WcHd}BEM%r(1w1-q%!Id?HP9~G za^<~~Xd2GN@)+KOr7I5~l+^F0<3(VUlBL4Fu#yDBj3-|Qp*&IjG>*gMpXf0u#LsrB zWjlYD6z-o6Wd-~ZxecY%%G&+!^*X@O6<*;pF9SOn zxLi^_-LR zUv8?~O1p;hzyyh8OO4sy%yD|qRXu~#AQ^1Ob)JwJnH1xTgwn-~(^-(lm!=9BPI>@b zwL{O0tS1V(;EhXw`zM~xzS6B{(S%*O*_nk!gFrdt;Hh%Xs8zL%1K1f5<|8Yi>WtKH z237KLY($#N%=bw2+cHUNGUN+z?3FZl;6XvCes>+HCw*`r(u(*=XM@5QW-j_n((riu zzxztBrd&?3D<3Anz1k|Fu(vMLAX3+-Fr#%Hohz2vNrYZ54BdCn-!D2EF1CX_AX(W3 z0v40JPZ5E6D&4haFHj^dYnlvDeUBYqQZrG#LS|gLpzhN z-=f_I5Fl8NTx@)#6BUz>rfQtQg}>lq4Y$?qu**M5hes`YdLya6%Rc3~5K8?VN-xyu z(B~zYPN#oT;gwK=-gJ&HBk~6ClLyZ+7#b?|slE5v(|vXaC3TjK#fah}`v8Ta$+ZwU z8BMPQnMQ+fHL2B^aA+7vP%7DN&gXBPgfzHTruc8q8F=F%JIg%9hg%e%iM8<)Ct=MR z^hfw@!`rNa;`~2N(1#I>hqs~~&hZR_q(2*gJBty}#E8=FhoybvXcri>@8GgXu&}xG zn;UF%PV4JU&fnTkuY#^NgQfY+4J7i3E@h)UU#Sh++JvF&H^0B4q-(54{NV4gpN+ZM&d-a4?9$K+)k*oh zH$U2x#y-l|%I5i1X|9x(3gm$-XyzPQDxkDOm1aO$8CK?ZI+Mk0p1f6 zO+j4C59+;RxJlW}|&{K#{S%^r8}$yyAJp8q|oHEUi`82Hn-#YCrNtI*Si`Mvc7kv|eIFJRmc!RO@zgmVW zaXAePfG)8hJ!2MxK-j%+m)>lbGy=#bPlNf0)l%*ghQkM$qtjaKXIPs9(ZHg|w8ZqN zAF{lkJh@i?YF14xANL9x80EK)8gcM}VbI!u>MggK*DLEQ2Mz*+&>P>}8V&r?eAlBT z`7)q!-#6*jJA`}NeOTg?H8efa@IVmFXOdgX=ewz^NJFR7D{}B9Tynfa)c^nyh>z*b zm-e@x62!VLlxm-IINuJGf)^KJ(uP$_BGvjY=j#7h-*`DAiu^$nfjW{oaG&6Z{>W|Z>DLowv*tjXFGA8EY`EOqdS zWC7vMbhEv3a15V3dQg(khiucFiO3trw<_mvDFb6xjWO;5A=$`HUD_-D`T7C7Kz{H% zzWV&EB~S8m`{R5+crc2L9FMvssZsh$HgCOJO&W&y)9FP{gvNoIC{`=LO%8I#Yrvy{ z<_M-3nMs$s#kgttotc`H7k<2W6EQx1s}L=Y1q06$jH*mn24dg7Oy&L?w)dgg&M-fD zm`xtjXFl^@E78vfbKxx))t|6V-nAOy^T9C(A%0iwmW2a{qG7O4yyoO_9s^6LdK;6> zE61&(&OUJ){z%gpEvI!4Dapc+DvRIId*QO958#h6r7GOiw=d*Ow^8{8y5AZ^^2A%*%{;o9TZ8 zpGxS-h^_xG%9M#x8#Z=e4f`(d>lZ=nx40zSAbl|EYTUWO#y>h~bpR#Ma<3{5|MfmTxoBg=f6uhIxSju<4p*x+w@*%>fNEj;(__J)G)W%&z(Bj}-FQMRF5INS5j(Fk{XE zj7UxIwyw6RzLIgNJS`uIar=KPeFs!i+4r^cb7oKhu>lq!7L-xiAksrJj-!Z_s7Nmv 
zK}4E}h=i6mihzoeQ3M2m=pZ09B26$5rA4Jg2oRctB=k@cLPAJ+^Ihiut;K@1Sc~w= zz2}~@&pvyvp9-A@s?gxN*^9u^`SHXc@dE)RuU`?tlyj+e@kKG(;Rz~rYO+5!wquIn zXp;!TEypZvt6)!{&N{O;V}p;Uw7(9#cv8Sw#Zv&DJ|`<3m?l7<^1SkoNTP!x1Sglz z-dl)}E`492iNRo#fv&5&IoR=xjn+y?NJyc1%ocq1C1T*WGV8#~=47$a6n>iYb{cHd zit0z@Q&~h%p@2*y`EMV$nN@w;p*_f4UfX@CTQ z#^7s?^gfnlALAd330NZtKww%T;Fdlt;srTCUSVUkWXEo}32XSVNQ3raH%R(`K-hw;UhkZHof#O+YR)N~!G4+uS@torKlG+Jd&bor78eA<+7V~{%)9WTfm z<4{ao&i3G;F=M~y@1=hXtk2)XLzctOW%ZyCF_zN!oZSn|k=~LM+eyE?c)gwrvexiq z_WiG8fsfnHf!W5-QIYK{@^Qo*c;p%ALL8SCR3En63$-}CBRYT-M(0fnf;}(SQPxK$ zGvXVU|HQr3_f zKUbD^u=?p96xOPn%UG7j#7y^|dAw@dJ!FCYFdqwRL>8-kse@>aXbkBj9N%X9+W)9w z1b+yB<*1JLV+2snTVua=-&QMHd%uFdBjT>ny9ir$UIzJZ2YM|^LHJr~MWP|l$v!dh zM)VWs^r?(6?!={f(vlB+AR^jIv#!R_7`*}g^?is=E_Hw3mU%|pdx`G9HR-JBa}JD2;8E!uZ5;NP$nh<^3YqY zlwAMtyN%G1bT>jYj?g4CJb3wdn8malOPWmU^DDBCKV@<1`|85=hV+HPnsqqYW#{L55|L zml+&cGPiB?3e=H7kiQNAhs56y>3ocm7e+;#H}=cai%+n5(r&Pl2_1ywrsH*W$=Z2n zw$0%7ht5X3W~AvQQWY?gb2oQydI4<RBNR!oi7Ir1!vIIN1QdO%XKX^ zDMyaUoxGjQpB4LAH}s|89>!6=*JVmMYoaCMb?XR>3}HAM*JZJWw??k~VtZruS5D_; zU@x8nYY>b27^edc9$!KmUXLNRtVN8FOtKEP!=CemFj*}qwGwD_#zs}&%{?Ay7C?bi ztiU(){S!M8feWC$zA`wvsrwB$_cF|N~W+q zHr!5w$lVA;jHykQL`R~JMcD_tyQA|@%?PVZGJ;zV$HCUi03b)`jDp>U7+;@!N<3=d z+LTdmRJ>RYYPQta*R)YYKI4j;>55@lPb zc$>a_^P>~0{j`|!u)d1l(0yP|xjXrCG zJR>f06;0rz3Qxd>+=dg+8gneB>!yNC-kD|myWut}pZ{61Din!ID$A=wxjJdxTjNo= zlmbhu*CUm(7LdNtEbcFW2uur4l65j8It8en=W@bE@Eecb-Z~1{B&A@cY+fc+9|_1@ z)()Fm`QK&fB4v7w|8$RLiE%v54WP2c2m(X(^u*I`zelI&yv|)jsnmH5vIM$7pJcwj+`bxG@CF1Y(7d4F%HtsvamC_85 z09}AWpq$h|pRtiu2wP8_g>3|7|DQAXty%gApCA?HXsYl4;RtmpdfBa$MsUwpWM4IR^D(YB_#d0T{pG4Ul;ZNZ;B1H=@N|+F?an=5y zjmA4@q$w0krr*-a#kgDj({`k@IE}uWOG8f+%Wp<0-L=(JzlDz?4M}bxA;PKC7Is+L z%8?rLb}TRC*4|*9ujk{K>Gd3QSv*cl*lW17>mIZgAmmg)ayJP>y{Pk1CA`!jM-u5E zhnuS?*}OAoMPRv>`xgMrVph6?g5t1@+s^2uXO>&9t;Vm8aH*5Ca_i`_7c-no2W>Z@ zlUb=oGci&L=c4Tv5^b5}6$y5CQR7XI!k2_8o2t(1V*wP_mz9M(r%j^qO%(bumq}S; zjN3ii9S!+ih%BIl0NB6rxKA=mmxXfjahrYdOCaALUKsQn87ai*1YZ@L2z$rIFu})y 
zZKn~%&7Lp!d*Vlk0VNz*Cv$V?EhRY(Ep)IpH)OBR!6T1lOf274il7$NF)GX@d%t;p zr$lth$EoB8F?so;f8 zTm%5}Cf2Mda_&$bXZ>2Nwu-unygl z_JZ~rgccAgY?J7}#m+$0*M<64zw?B-Jt=Pd+jaVVT6=q)jwx;PaEFS-)Yg%lK<1#2 z5=^(B2;SS2IWl7&nPpeiI|}i`;3JOwhcEanqZ10!-k&x;J z6Ki|;I#BF_e-f*SnC?8Ak73GjKK!I?o+C;J-&1STW~rI@6@;m4JWyouLEzpZu+hDZ z{G#%R@UZ=#P0XaxV&LNpZ?WZ2G{wKKenRhr&Bg8f{S9y5eIx6SL)*IYfm~DFz0lCG zP!rN7@c+wrF2q8dDO>Z*LUpK(!v|1lS5WaCdOp0pd!&zZxf z94+PX61Sx4)+tabev&R_sm9Nv9QrN@imNlAM2v(;Q z-NqVDk<{*WxgPAF^yt`>d7wvEnX{61I^VVV`{i^$9CPAGvSrG&gYgpV@5RS&2^?P|k6rZ5*o7dW@6ghPk&NmZB)G~A_0SKy}E@&X{8@0 zJ}^HSLtpqC>w)uNbLJ(d3vzP_Bb#`YW}s5~v#R?0o1p#RMU4k&FI0p8P=J)5yX6*7 zOi(qlSTq{)4UfL6qMDd%ve_&MM=m|emKpft1CLZb0Mb}1iVxn^kbdD;);U!ac zw`$f+l^a(+5dGx4VhZjC#OVuw zk+knK7PDCDx^>o1m*vICu$SWG<~(CQB_+;#d}&u;W&^NzIa=}^7sGA2Irx44$Z!XS z%&RBZHA-((H}o3LZ+%fbyE^e$_tN4X&JEfuiUYf{qc0In-|6UiqG{j7LM3-xZTKa|W z+8X)%@{Lj3kEL_zV|#oB$mAUhiK)6fg&~(HRrngBZ+Ng`O;b%G9b3+7fI?yvT6KeG z`g^Z|1F{1^li@_5Z*y*`Ty{!v&o(4J57hLB%q0=n`;@B8g)XU4oZGTK*^FOxhL>77 zwGnuCpxbXXh~3%uPboJKbPL>IIuBoABTMLhRVXg9%sPDbCl87<*;sk1-Y+cjHFGAW z=Jnqr9h%e0vI{I|$4N7ZAx50W&Cax5lk19!n#t)Zw`=sARis;*Meq~3YQnQ%yQh!W z-BJIfEMO%MW3HCsGM_Z)NN7TH5Ezgm*tZ@nwz=Hz8C%$&Ccl+4LJ%xk$GTdx!*=3g zKh_=((n9i!?#R>&M+jqMxFV{SdavvS9^lnc+0!DR>5r}m22*fM-X)cxSk`E0kvP^{ zoWO7KA@}K>h&TLQe0u!5%Q6i+V3IP;d{$b%{9`W@oyl>sA`%)iO z1g381o|{b2zY6qvi;j5y)#gW;S63948OpnSDc^5UR!sof1kL>-D#-F*vxi3bVlNeW zA{bo+fKoGd6*x?NWnp^H;y^pLvcCWC5rbJkw%|l{uyjc&JfibR#PoB3-D(6vJumILf z6dK9XOXr3QD+@vF#=p`N-8Q{_`&R-KTnSx{*xCjvM@1R2pUD+=!4s%{=dSTFTQbBQ0Ks%HQ z<{ou;L^jkGo4Xg<-NNhmn5V-p%Ts&XhiZ#r+pfgxhwXEa-ZsI^$Er3;kK~syq1Mj8 z;n0j%B{PzXi(}|4%ocE@`9Z;m`-p8aE5lao@NpUo+Z-C}UQ@}}C(bVbKb>I>gi4t( z@-%k$c6Q^&cDz1+I!U(QWiVJxJ?aZS?gV2@-&^IPgI?gN8G#q}Cvmtz}tA;w@uNJ!H1`nQ9`*cOZec^yOkfjE_QCgKU}xFe$V zTXqr4~sJ6YidhPhsw#m@bdAkp|1og2B5G ze7DxV1bgwc)KsnkZNvv=mZAO6s;)I1;Hk{>ftS1YW?##K z#*IU0n(UNVk?+#NHhhS;kj04i z_;ci*K7ElYyTrtHyit$L$7BRdHTw80d@sV_@=SQ$>9552PfcyF#m)u}shNLfN7x*D zY&8u-+PXR)w7MJ>X%_c3|L#E+i^h`iZXwJ-9xIJffr8>aK=W 
z%KxPeEu{J*No9!-DbS^lN(EdwGVwc$H zI0b&uZ+ud+$t9vxvDLLUgOTmU5G}v1voyRfG*nIl(LduYo)LiSD4&hVAif#Y&d$i% zv8(S3oOTN_Y=N};XO;^U2IVRCM4sKHr={9+f~GZN`owGhhdS?@Dk|5GYif+S8)N4P z#eogiUlh9U6ZS{`(sf5Sfz7-_tirVa0aOT%h%bzjwj17G@R_4hv5JvSJQ2tB%=w=t zVkmcBhbafoC7x6_%Z`+7$Qat2airm!gy zOwHZczly!QUM?lJ^n-!jcNZOKO7l$N3UZG`Zz?^QWvueQ%J&94TV3;ZRG<^y%cnI? zb_gT4IN!)>I?VfyDPxtio9A5rnW8#var;mP#^?**{Nb~3H|&I7hv5;!=a>+5wyRaejPHc(cFs94_EwAK4F_6F||mzc4c*{O96v^bcafMTSXf*O{*) zdz*3E!IlQulCHdLLlo%EZ%P)WzG-kN0vg@9EDHM)bbGwQq)V_ebCqn7)l){93QoUV z=|(OtH)*Aaa$H=&tiEr!o0kE_qZQb-JBj^fRaYW-jFyL7V;-p;zlR;Cc5LvCIE=cM z;rc1h@QT@#4}2ku{6Uv3D%bZ&*Q&<4Vc)0S-1GFsvZYXi75OQLKE1^zYFS`RTkf>X z{zQC)L(@B$)?Y0@I~xNuUR1pNh}k!~8U=ZO%9*mbTZaTfkwO7JbI%#?1(4aa#2~1k z$GLh0kO(OU`@YV9*e;k-`53`qT3eVnl;i(=LV3O%DKba3pc5T5s2yW~k|Kbni)58R zvezTbcBPp3+SERgwTB=g{S1^N@Z3C#e6IGv$27{NPJLfv=v!3dpTA232<2#fPW zL;@9tDNm4UeG(L&z=0_0^Zl4BezL#f0dC0sR!I)kq8YA20y=?FXx;SsQdZi*4_SyM zh=FOTgFf@Kvri8bBOaJ9O-rW-@syKJXTI!R3KJ52)~2l+Aw>BO+o?SOF3QMt49{yK z65zy@mvk{cYR#i`ncS+QtEnNg}|N-*g$V#m+LuVm;K^ER+`oGv(u z4}m;#X)XER4Q*xx%7$;?0TKemRw5)WkGh1nT6DWUnXE>SRk{~~lL!k-+MqrIbzm>* zDr;t8>#B2y66_2^Hj^Ro`e&$W#6-gI09IQIGf27 zZlhx;afs;{=DEe6+We07uu|3e`yRf9R!)3f)-Z7Wn+-BORbs%b0F*go&rQDCBfxlNHM}wO9(u)K*{c7Zk1v?D+ z-wp3GeZ*XB2D(~DwnL>I|5KhF>F(IEeuE0j79K_P7u>j z{s&FAv=6&vmIM)3)JPSdWpU%9y#>cCD%2mETh>5p5801Mii!Uzoc~4{8S)9t_QwG> zL)&GH*&fzn{28R!vfc-*-ShQ6x^ht9Z(ffH;3;gz*MPM4jT7JD&#R+sjPWBFa4Z=T z4xfARR8Z>Yn=YpS3Xd9^U1K<~3CiM>W-aXj@*WOa8JF;>=~v!&#!$@?P+qcY=#|;# zk0RFOppTT3d4fHGMuFnQEv(#V^EqWk&hYW6mb zJ%#`;T~`s1s(`Nm$TtVUVq5n;lV0F8bzky#*$JBoVIg=o1@{+V=L?Aou+6$Up3gD2 zd7ou2rl(AQDN^6b?Zte9OLG3~aA7gq;oLMFe+z6;sonX;ChLZ2`p->ID<{W|#2Y|Qs+&VP| z2Z6*Kv}8g4wv(`6&tRbpaBD41yE~)^<-gj(U3a^J!NSisfi$Zkxci5+Dtp&~xAiKp z_kYZi#J0;3rxxG4{>2|XX~Pv~Q-<8p@}GQ(Qil8?k0Mb0Z@q#K0Ltx#z_3>S48Dm} z0J+5%dATeyIJ7ueOP%NXsqx6vc9SyV?l}M8A~Dcu2dm(oPIE0vo3Bg^QMQxnI#W-L z$~6l>o~wn|t=>*%R->Ei4V2L2x+-x5e=mD%-y^_Acta*Pq3LtSgC%cx)FjyttXo5c zGq+tC9Tvxfvqx_?B+l1q;`@HxO1uiq?^TXTCq3NmdL=AuuR!{K;Z|uFwlIPgqIDWN 
z!IItPzqqd(e>KjtS*B?NCq}C%vLcqG>21F6^VhrgL7;(;6*;Y3<6(Xt=X_CmaiR;F z(>(zkOubxTU$`_8{g|i@E>;H}r2oG?262YvGK82g*)k0U42jcx!0Z|T3w>_mO~C;} zULW#0;9w{q{RJPC8G`B}ztwIW%mHfs34c(E1tjD9kLmX>vVSxOE@DH1)XT8D(H^ zy~=LzNmIIW^;ijs2o0`}Yn9Otv_XT0{WmBcV7{bq7e8X_#?SoiptzO{ID5j)l$u`~ zfTXqS0EOiQPc^M?;kRBFWdCCZUVoK$0wCx>uq!?o=|Ne9F@_!OXe}8S%Pzpb!H+WE z0-SfA8XnC7c;DWPVh;MRn?Y)#y|1$;n8RAXiqU1k>S({7YLco?1c(lL3i|St9Pm*|KRnui-cI@)Ba%hHAKfBw$xDlUMlglj0yyzj3>% z4j|cmkW~H#$Q$`&f?aT3R}zsdZb=rW9RP>f%`&M?fO@Txva$_H|JG#i>+ zDerJK+{J(oU}I+~V7r4f?EzX;5T7Xh1qNC>@)t5gQGl9cwJmH{;B5v9DZV4}^0EWR z&NmXv$0%Qn01Ni-a1dK2jS$7Jf#Zi{-8>0QgeY9)4cO?$)0P z*h1iOlz{M-tSHz21n(FZLmTam)86j(5AVcQ9kp#_9ITifkXQ~*xV++$pmd=q zffvy7O&bd4gQ4A{l*wwBRhNc%GPO1uhG0_O0?NSCHnee};?R2_^e=*r4{&lM5eyQY zULFS8CA&zgnt5J%*rg_@1|i5{uw;i4d8>P#Uu?hXrXnQlNnKwZSF=RAs@dSkCP}um zK@IX?cv)ch z2nj~^3*|wccE1TG2@i6)H`u%Q13v$52*goYk5}yjVN=vEJc|5ubrrT@FVxymY`RoS z=t$cM<$sa!7p?A$q-L!+pxydRO8xDw`7JL(amX)(;eZKXvq9*k)FaWYf8O-CMd^R| zzN~~G{~rrq1I!q}8!01!u)i5c`o9ci{RD7EAMSn|S(!JmCtc6L3jU`OyR?qotUx&k z0#cpLOd10Q*e#d`_#J$Ozfgy82Hbfq1K>0Z;shvuc3e~6{qKfT|9xdPs$4|yf9;Js z3G(|O9AI2hMYV*|AhN5XC@S^rcSPsoa0&;slAbZ0;{ z=o)nL2!y`fF=F-D?0j8ApPq?>;RV9HBnix0fA^UIp2;|9vRmrVR7IxKFygEssjE;y zW%^0?uQk!Lu}c0bt7V2DcC6=XZ@?J=BEjTi?KfM7AU&KT(Bv-D)u*a7f6E-5mx})>JCt7crZglyP zrL_R*5?_-d0lg~8=GZ!lBQ@)AXWhW6QGf2u=YaYHW=trv;{ilkLU$}Z+w@12FKEOw zB0&s9&^5GzZC|AtQ#wRIB#}8X@{$%)JeeOy1ocuXvExLXS1gGK~Xsi#CxsgS$3 zRA+JEETbD#LX{GjQgCYOLoMw)bqeY*G+aQ;h$T@pl7%cNtWs^wXs;p05Ik|z8m;E6 z{$=GRjF2+JBN$w-mng-43s{P81muj&(TLT}#udN{&k-C%Lg`INqcU`{HZ_^487+A@ zllNWQdF&nuYfM?=Xc7d)PkYT7fq~|$AXt0E{!yJlkjw}V_dFeOag{#iG>clyi!VVv+)?Nl~^}l^W^_Z=3*7|QQ&jv>xFEtEY0$6iri}a74&rI}l zh_My3^dYi6y1YZ}>1oX37uXu;oYP78`ZCI4rPRCt$rtjHp~Aw(xoB%6u9#i>>e6F8 zZsI%qf#@e5kMYavzq$H~belMO3@@~kGZUKxcJpsWAR2a>G4itLbigFoJ{jS{<56^QYG|fyTJsnndE_^r&{EA!c7JfYw}qC!I*k z`URo`i=m>XdH>kmbD}WNr=HMeFcBZ$xXcEib!63_+aW{hY?|0vH}SaHmnj<>Vr7Im zsM3;m?NPKpB#xJ4>U^yo5b1b!R=-eLXM?!JH(J3s;o`C8?#ByqR8zXk7)>$fk9rLt 
zA~a*MB0KG6l)l{atKUNE(CVykDX%HP;wJNZBe~7d*_ud^M;`s07eK4p;YXB&j?`gE z%Z1eI+*L&9-xGL2ZLA8(ep;d2^RZKwLOM*65ogfnZ1;T~J--I=`815xtQ@i&Jlqi$ zGaxw1{Cx_wlc0lc5L$~Djr;z5$hlhgK2ZCLd{i3(11|uis|co+W0e+(b)ccc=+k*sNTB}fwlz7pGFg#ETd#*Wf6O2vr{0pMM}HL(o`$wOPS#R-YL0y zYx3f8;fi*X>+))`6=jlu6^mCnq&0TEdC1iaw%L=oHbNpz78hSsp@0pU1*h4u`Rm$e ze0n0_h;O%K$8A*q?mhS+Z4#*VXQhcj#Sqq8i0mO8DDZK&^O39>3Jg3+e^-{i#?IaJ z0}FYq$edH5BAE4bVUhs{2CUpS-eacd-EweI=fHM)6V-M#P=>aWmHTO(g+piLn`M~biYDT8xe1W)j|Tj{Cb{t= zj$pTFs^aV?JW!4h89!b|cJ29+(H(ZRPV5ebJBd$E57+d+QOovzTB#fwAUfi?;Iig;>$~w?SC_u(X4Icz0^iBUa>8ZxsSQco zA5vaEX}i!na+Y@{&o)+n3(fwWw-u)PR<*C^>CJ_K!@jQw&U=#zBY&ZiWeyEznt1*- z`42*6#w>?`R@blmt9{(wV?IG|N-RvTg0ssb_U5h>hx$*X#}y}@)zK@2x_7THPG+#D z06#eU6jI<2nbPqg>%mVIq^%gQrwaBzU}w>ZWF4(7ypiM^L)M0ubHZJm6WMs3&Vj57 zU8$39^As+mj_VrLJV`jwJQ?j&*MEcP++{p!uv0NRV36Do?|IzD!X-Qi$947+YMcF^ zH>@48$)wIbIUj-%P=kU8!{Uaet7fqxgVatceSi)quV#lwp&eH zs~_?6#gz`x9}4e)<8gBB2o)qtkW)YAe4I))qR=q z&0Ftjxeipi7xjk&X7^N9A;_@%AA)?E3uF+xb>*3gpue-8Lm8I9T}iL(^grJ0Etn4s z6@n(E=^owC#1@uo|J(0l<#nZ@bMADci9Y9_gVCL%YZt(ovZ2l@R`FXz_wRza;C|QN zdXOfCEfL`%!+9Uxl+#3}=|`9KeS@uh8BrcJ(;aX^VZ{jh=Eut`-|LUv8QW8R?M3&o zn@>EOp9)q{UF|>6EY|9U8-I*&XJnE}#VHp+s94s^w>+I-)4My=^!uX5)kQJa^_s<( zHOGB9uJrIET|*w@UJz#)fKO6C1jvV?eRSu5{D3aVjf}9Xx``4xR-h;>O2m;w&JUZ} zO=tzreXVP&q7c4YwneClsO(~kNs@#cP7>5*MSaagpvBQwWNd#qS<_ZgDY)AX|I*V>Av*4)c>=mn~q)2%7UwN zFH8}KNq5SAPjIi~o3)VkiCvp3y>!w_a2Sq$I`e@}*4fX!!aJ;BWxL<{%bxM&#Nz** zSU8)jn>pbfWPz$kLXKc#}z+^_*FeVOdDb?$91C_eD3U;RmcE7vnn^`(sQfT#ew%KjqATLJEc*)p)cXn?;L#kmc5#e>RgJnl3< z#h9fxEq5LtcJoq2J~elJ)MmGdAI5bY1d}o>f4B=Q6qloRyWldudD*25bp>Hq3GfQb z<5OfiDJ<%JB${QoN49~lMBI$m^`05ViHmtE?c0KU^u!A44^f?=CUVztoAzxch~0r& zYJ9^XJ=w|EVl-PNQRu`9FALJzF<=sZJ}?33Yqr?S??Qpn2j!vF&|ea@$`T)_6U}0_ zl9J8pc8&(XIYOGUyy{@*+{T>Q7?6@Zpq`We{7vRw^Xhc8Su_f>2)K1|M0!0biE^Dd zT;xQKw-fn^!%OD=o{hNH5KmqBfvt%;Q`c9+TzKSj4gF*FNC#T{{2HukioU{V(w*BI zCUXGN4_zBsv);O&n8OYwYhJD_h2*%+e@|t#^1UvcQaB}NC?<9tC%X<**}FUkwJFyf zP<-A2IH(N#6nY(5--SZ&7_m)Tg(Y9n^6N6Dgx3fLFx##^|G(Tm+DBc!v_A0=VGrH} 
z&~E^Oxj?dEy+Z!aaIBb3$Rw>iR%mgREaQ>{EqkpQiHP&He6?2rP=UCa>F~a%Yg$B= zIs5Eu>La7e+yV=`1@I9q4PcD5Uco;{m6y1#@BvOVDks1y_!dYd&QKZ)55vYh8=wm` zrbq9j{H)OK;AGEZ!`)3=`S`m@RC``UVl*Uw#YPX)*@VQ%QoAKJtQb1WN0E3sAG2$T zJeffzcv(mLJ=a+BM6w}|joybF5j~f`A{&+j6WRrXQnO~1%4Re>@y!R2iqa95qhZ{#8Fn|S{T1Zie-ZlkQlMbe9SCZuYD z9*!0L#mC$YF`?`_9j=mO7%AU*iGj4>F+QnhM%|@-#8%Zc`&9&HLwB20XBnBqKT*H( z10JGTzlez?nh;y90{y22TL;2ip0bZoLr5 z!P&vsUV?1fs%Wea89ScqBsvJfQ@jn9pZrNh0cyT1H4ol9`K{m3ggL}KQ(sjDx|oWX z9U#qbrMg#&)#FQNU_)_i(HwS|3Z9(;(iUQ&7rgCpI`)>>t4`Ve^#cmrtn0dpso*LB z4s6&D%>ghTM5N2mVldpCir9uU;}|LlNb_38pCcSKI-5$@pEKB9F8_h{FUv?L02H1+ z^&Gnf_Kmvg!%;bjfmBp(z4>KpLCM*7HsPC6-6_?;TDY-BsyUSz8$ehAp!GssHYCEo zhS-4xkAk)NSXw5PCO&YTSdJ_52KK=t{8|C)bU9oUUOG!uZaQ$ygY*Dt{8Ev62L8fZ zeM~lc16{93E__iIXo@<-D=aMyjB#?C8Ez+)7!c(}pu=gaT;^U<9jFa)Rg47JtyA%h zF*hw#pF|f2F2@1I9(^6T$#lJFR)`u}@UoyPPg+b`o^QVl#h(H~FO>QBBr#f&QYSq+ zhYMpbA#l#AZGPs)DEKSfkFz%o1Yq%v`a*F3%|U=Q)XJ(f2a%%@s%AmMH{N@>fPPMq zu+6HoSJC;XUA_pn5HoZ}t(jiT{(Rpk&Z? zyvxre<+nbW=RaPNsMjYoNYi1Ik;WHr>3#z!e6?@v)PHOq8nG}Cf<5y%ioYD zGi9oV=XtAbh>=W=e{G|3+d^H7Ir0bIhqLeR2A6uD0x;RV2o^ONHGC_6g!1i7Jt^4o zFEsx|uh7~x7pvhqfqq2ZQ-pJ}HB*yxwU=9sT|KH@4f;fj1ecIkt%`9OFH0pN&AuPIdw|}{n4%I zhjKO0HT67Iz-87Iee`y1&2}V~1>(2%HSLdcX{0Z{RKH*s&U?Ya5>SU{xDz{?mnVYp zs=Z-5KBWYl*HejELnx6q6t|4}J#pF|z*A#rO{nB` zysEE0J1hz}CLJd`#$)%pm8>-x&*Aj0@7w&KVRQ`w~~4@NZ$S zUBp?92P8(Olv}kRHtjrJ%a6LhGQtWd#uc&8_m)+V{ry$@Q`NrwT!+&$!OnLE#fDU? 
zYM$3ST1@H-7-hF$)lhFrCAJ_8*>KIBRl z9st`vD8P~`ddZap@}?SpLwnXj>fUSGmcFQC^-idMf; zL;DGX*FDb%cMO31C-22$qg3vWX&$7Bbl`iU_=*M`jU!QyS!0U09D|)L_Ndd(ImhRZ z*k<^D@x=uw2kjnW^Ku-rCU15K>>Yjk3Leq`4bev3=7)^SSD^phaQ*ShB{8(OD#+aD zAX^qv=}y%2jj%tmbLRtE1d_V*q5IS^%79Z*|ZVA=BJ*9ZRo@Z@u73_;{D&M_l4@75>t6lYqoId4)UnS2 zn+X3>RRvPU5s|l(JsgLI!?-k zvoVCE8fCW~yEtoReBf}tE$$$n(yEd$c0Aeu>B@^>tALb=yL^^;%{U0X6^V%&7ZfjJ zMhwZe{`s?eVch2d>yIR%%?zC_BVXo?{FHNbHzlks8g;}%XS?@h9$_a|1p!3qpS^t_ z1yYMVzfATUMjO&#pJGx*6vh}AXQ1|oH;1O8ba^2-orgi=yjv6&LNgJje-*B`*lQ#M zs=k!~;C#n!su8Y}5k=U3(82`_jp~~hubMs?Lb-uykNXtP=uIOL?a%O8@kZx`iI%wM z0yHD~oiBTI-T%WJ3=!D76tw!DPG>IKN|y{*4l$U-xc*zellQaH*N4Ec6kR}?K?5PQ z8p78#|E@57%EgHw=i5*jXJlcwtbyPADzLUz#W9AnGE4l(8SQ`5k48r<&?tL2Ci^@; zFIE29wI2W-xM`slk+m3MlL5!VNma>a->+uvb6i;dbN4!6xUL96qZA(VC)d>9K_}0^^`WFk zg7``7*XnionA`J<@YgT@ZWyD~7-3gH%6O3sCI|w-vw9EOzepXC6^O2Zg&NuJ0R`(bd9@!9Y5p3F0h^q0crHpomBC>JgP6<7bWSp3w4defwnu zC0B-WV59IlweUhK#hd_DC<4pH4q9MkUqPo9%xHt=9CwbgNLg@O_lX9a(Q=RrJEIHV zAqhGxFHJ9^PJwi3E!&qe6(|HV+rY2bwST?kln4kI0HP2&%{7{_BlC)a-nw~>Xqz$v zKvdI+a;fDwm{TR1xkQ;A+zid_bGa<8FL1AZV0QnO@F{YwXGwaxRejGYSZKlM3nPS7 z!xJ2#a{e7_emw?E~un2g+V73qTI#Q!;=|7=XOSct89}rNYVFDOC!;+>Sxw6b@ zJIqUU()Aod4Adx6S1DrvT={yZM}%EB3DB(m+gd|p$}?fI%|_w0s$_bTI5)NPXm5+(G1j#UByi$2)c~t%?t108?zbWYJtSboLNMFY za$<&>M;HLN(Wi^LEGJQ}U^e3iwA9mD2GkI|N3x5&+;SOnV_ftb@A&-lys?*Px=KrK z%zup>q%Bw%K%B%z2-@!ivE2!1zA?a|yI7niwS+#QQ5Lf6{a2d!)5#`aIb)Sp8Ues? 
zmQh37K3rb(5NZ1+P#rzAw>^PJJ=H#kbNZYQ z@|*Ezrl!w7ZD)4ZC2=uy{m{_zDLSBq?}=ujMuW06&A$|*Q*`>mkki?=M+o!07SJZy z4HUT<^XNC4RoK85^x0JH@zFFBR?|KzD}1;8ZCy~28=q)_YHq9!fFMen*U%XNY#6hF zrGz#;KtvIGX#r1YP){g7?5cEoGo6JCdp4bwp>ERsD`nw1;=cxJCtc2%qYM*QRR1Hb z0GoNr_we-q4N`4`)n-Z(pzgTxrdyy*P3qT7@!NSS2`e9>3+XGM$?E0hJ?i7V&bQPi zgGNbL1b8@qgAVzeN0F$oxCHv_cch`XMrGUVJH6qb3&Q;lH{mWoD}+!iZT@59iKIpM!jR;&6j-S{COChI(tE)>1eJEMswYcUIEL2()9&3t7T}K*#w>wxFLBOO36sk+u1vh|JfUY`VehpBD zE7yPg-#44aJNbI2QD?z5`@Q-P2;uLatK1qAth z4ecbcads_$axNL(`S|8VO`&H3z-VdcmEdr>g(;LA;`?6M?{n!Zx?>$3e=c8LTlqK> z8{wstoz`aEcO9sV3MW<8qS`1N0s}yg*j(zKEPr8f#8g&63|L@uzX%Vd_Y@x^i@o$W zjve5OYAf4GP`J*Eta^|Y+_&a0TF@|kTe&Cs;q{}LtIOUth#`UY&G?)ynk+^zLIPxw zCXDy#J{bWVND{a|hUt<0{q`y3ljxi->Ee6MXc6eIQC?n=xr5Q7qumw9DDp0?_>k1c zwz1C$1xPyfw_bGlajc&M+n%}IV+Zf6E%NV1wNd~7%$)1GkE+$?*lz%N!# zB&_{3>jxhhn&7aYA+G2o@VPo_S`Ll3@)aH<{YxCem6+R*(UE~~f@|^WQkN6Z&od%G z);Fm7wlcEMN+0b4cYz34^w7l4sFb_677cvEzo^{T(nrni2;B3!&)|qD9NOPTM$gnj zKE&A~ehE5c^qm#u#T10%%vnPQKSlbI9lOz05%y{NfDfQiX98eT)K?zhoqN&2c?A{% zh{Nl=|8ItXItmu{l)A2j)Z-s_=uVMGhW~?tMAD;=@?fbUD7Os<-mWuLV!%hpx&Qqg zfR~(R?8H9t4KHoVupq%mL&DsN8dAYB{uLR^r`Q9Sv-F>rleZ+R9zfA#?enj(i@hZZDNi zIDNUkP}i{aDt5k)G6>Eaa4Fyd0OJIRGuCWLhcHw6SNz?ss! zr@HH%rN_wYCoR*_U{~d@$Sq{@_G3+%rJ?@&CJ3u{!^~iu;8FMrcP-#0o5la>rS0|! 
zWuVh276TGC5kMIpN!`-eGqXCG8fnpkOI!xY2P!fVdRYnG*hgLdiOVxq^te#E8I0Nz ze-D(@k4%0D5m;Ns#zvxvb*d{m%?_6r4Gr`(KJ%$6!;hZw!&D^LJqz=gLR1a^dRDRJ zv3!YEWB6JG!Ld;=&`4*cHg3%3(~>II(o&Jm$XXQPa%e_)yc44l9!70NX;D@+4eT_JSmd_*i8jflXv!w|#QP zZx5twMu(9epBKm2on1ks?1d7Vkb^U#@RZp2jpxNrf}=rbcF7!pua@s~<15JQfK@$c z4X0s8kJPX`6lp46mR@a<2a<*kEQFi+WTE9juq8BJD=A24F4VI;&EqHwng~&t7z!~e!W@r zcYki!TKL0e;#1>h=^5G@$d*8dcv^PH{TNLt(Wn8HtZ88#0V1oAlRYNb&hV3U038ny zHeO0xetvb(<^YT^-ea`vXdN4|23gAQTJI-@F(gt#xBqS#1dftJSI%k^{9Z$3Ybv&= zh|*g%@g7$&2=Gy6(|Znz;v+7(UH7>|r%O8?JrKbpOwuszrOeOsA6BW_c+8^dDc}Hr zLr?H8q-3=AWy-B#%BhQ1$!Q%x=KMM6p`Yadv;mcVV6e=QW;lR>CMCBQykf9jik7yM zOY842Tnwe24)mB)IWcm0jokDUtYA8clV{AeF>LIRyB#_j5&u2v-wnE&_EGy>{os}D z)3>Qk49zu7NV1X=vFH3bhqR?7A>wQ4?}b<3Q!+wD1d(gM9MRou3{>~zioO3BJzN%i zf3bJWQAmh62hC<8;XN9Uo^WTZT;kgZ*}bUFD_#+)FCn48WBZrtxPLd02}J-W*TVg~ z!Cdo0gWWy*9|bDl7t!KX(Ri$Y=rLyW%wO&!KVh-v_lp$vFysm?CM%b*(BiTnnhE#@ z8P<@~H@|_5P@Vw@Rpc}nnA_sL38&h1FI*Aq+fK+pHQqrx){FihOWz*Pbo>9`efKGM zx1^I4vAbJmp+YL`{pog>N>VB3EL1{TLguvh-2usAcex|SC9&kN9Op2W!>kgEIb_(_ zLNkXAo7v%g|1O{J?~fjJ-#wc5uJ?7lUa#x*dLDohcOaddUj641m5NM(!U9HTpFHJJ z4i*gqW?F5RmwaZn9x19#*Mq`Skcu}}3reAv^J3(8d#>l-6C~DxL|V{vM?vu+3xcE` z6JWU8&&sO99tSC*AP%Z$fa`6o0*+zF9Z7jXzL5I%HY zxi~lI4nhEuZ@A5vvj2EU95SquCyzwnuk^jEkD3M9O`fc_#2oGE+m-0hXu(b(k^py~ z@K!G%*YWq)e_JF&eRzm4hx|2>1~=(CCN*e}H@O;Iot=};KI`uh6Um;@lhibn>F%S; zPzV4C0vq7y+p)|b1z1o3qCLUTPoI`uTziLW`3&d~DwC~iCk>CbaRM_0yT%r8YnT#h za$E%pfVaiS7TPRXc@O<`(U_U{fG>eECNire~996Ru1sf znh94~!`Tj%b4qw*ofWkX(+-3rfB2f$Vu;s3>75gmh0m^OVxnGLxd}T0N0dI4OcqB5 zy{bNIaYk2N&7bE;X-k`!XfQTcT{+?UeYdT{i7~mf?QS?4&|ve};dn>{6d>o-n2K7U z5W^qLfpg&P&Syq3>bRmdwuJncsy^C)ZS&kqP3+hbH>$W89Q6W1fr4*Yu8uh0Fp85 z4m8m>k8G$%$!w=)8;79pC{-b|Rxlg<^VU$qlE?w7lnyj|B;lYU>>~<+rUQ!G>|Mjz$O}~!iJ{S!8#-Da zNvs&Mms-IeDaG)Y!cqXGJp)uLmsW>>aiqYGL)lo8vCV`!Gyqs9ee(fl-hqAck%};?v^}q8WoYE&Mh2-0eQz4Ue8we( z-lE)>q6`ndWG9Looh}NbicoYAbDA{Mg7_v=jkGxs;D57H#r4-P3dtd%t# zif7oJRV)eC_^1PWw?e!ZP4jD7Se%*tW0tO>Ld_TFSNzKVu_2GgzZ( 
z=c4MUlpd%PBKP-e-)1-TCdzpf^$??OPT1fo@Z2F%OFj^i*iZstjM8F|6pzPbMvo=L6k{1iAsN^(4$ZH4?<(} z35KWzXi04WWcBz!d}!2pnT(;!CHvKOf`6OX6f1) z40dH?d;19X0L%+g<^G7C^uO1Zy${@s&-pth(#HBFs{3eHu~s#~;;Hi2oSlvKD_iuf zL^rEO18rK-4i<1T?6XiYyQyo zN}D2?-6`xw_=doZ_;kTxUD*iihw4QTARL7o|(O{Ud|4Wb~!!fO*(BvHtse1CI>|p9ft8bL)LtBgOkB&OFdK-GZ2lF3; z1LyS%bj_7rRfjSQpAFs-XzJI30)=oVl1Or1_fZNL-G+$(pEpmBz_y_t-42-f6>?OkIjDjdh@e|nuT;N+eCNI zpLE7;{`#==m+>!rS={iADNWB~t7(Tn7opejZcjX}IP2N6?Ezr40w6e;2719&hXKq0 z*!MbV3*Fs?7%3nk*F#sDc<<8@=a{w;@lm&hfN7*RG`4kFn?rX~Aig4IaI)lNX<35x zsTNNKG_YRT=;7(W9DHj)!#6TY+(mR=blupX>IS{Hwd)M>^bW)_p1pTWo)dl7@JpY- zLz$EeZ5om;F#1aU>L_WZx9^r)o5m!b|y%Vk6G! zPrt47D{#-jsX^1u9K3d~QDbLKEohO(eab}r0O@sXesDoKk4c9sI1T?Ht}Cg0!Z~3Q z3lRB68s4FER!>UJO-lvZCnH^H(rlLa`3g0?(+ZoGh)17(t+TrKa^Av#h$+tk=8xzX z3X|1+xyI_F$433xaq_Zf?w6ry(xLnb{e}z_(_$&lFA7QVgwx`Al6D5x*b&K`Vv_{ zE{^CZc6g!;67cu>;6t#FSl_jao_V1lrRTj_DjGw0=Gk#b0)o!%NUA3)Q$^{;p%E7*u&Z&Qf3V>_%MyW%Az@i6tdebA=`H8TFII&NtresjL>q~i#;z8ADk z9%!xbJ;$Tn(b$I9NBW`4u)n+LWSgW5;=T|8Lkvv{u9?H-CiL*;}Wkr{4n|2uYS_P<> z;{O^>G8X-mwGr+lZ`g0QDW|t3(!Kk`0dReeo%?sib%1EFhS_CKpl75sibO3$FB}=o zBF7?xZN{Oe(v$~+q|u4Y`E!?8o1Qj*#}zXTf#Alb7@UVg?;QPMtIA#TX|kbuEdA!O z4BX8MD+iM~}!HWT%g5zGDMn0T|uF9JFJ@3IE! 
zdtjy{F_{L8l5&u-o{s^Bhc zz(ExYTzs!`$|E4PXl_s1SexyvLEO|3{+cFczSFGnK=SZ2uI^*5aOd)&b*Ku{15jke z8=v#mG;-QF{9FwDO7A_B2a%(q#BtNdc-(kMFDiUpLI7Vg9@;>?SRBxY*Zm5QZW0Y= zH@qk%XF|kn$g?EH>#4dU#JT5_-iDJo3(Ydia$*~-Cars zp!o@ec#AR;#n`!a#hQ}rU>+%<>dA}|XR@!8{o}yB=!c!%ZTzXJbq6@2pJ3^sp&+Km z_6JICE77@qzIgmGXiEDfM8o5TT2Enlg2|*58Y1S+_@s~LXyGs!T{z4Qn@9JylZB8( z>u<1>TL1zRXQ#t_^_|P=mh}nD2Z&`yu4QojrLu~8Ru7i-Qx81>_TL9tjMbl||8zf^KA$u4 z*P&-we4?PcFzSo&98PuasowU)XsL{YNr-Qbk(cmwwp z(Cv8g9x)eW-E73f_}UorZpR&@@0mL2I_~_mwej2SBPMIb+H+$LoeGSP1lpeeGRXH) z&fdFi>)N;F;6mG6owS`x4NXTA0Thl!>&wxd8^q4QV7FF+8=Z`*NeTd9v=R(mC-9B) zVYW#Q8!J7P`R0d|P5f5;B4%vyiSexVg{v#=_b4pwP;7$gm4h^B0y&7Q?nXV;S(o>k zV|$=3mM{|iN8dPGx1yrr7Eb zV!rIt>-~bP?^o`42mAQ_`XR~gJVUBClML~E{|-F`4jReaBu28gYIf_b#p5r&q+JBD z&3%%xxbdobWPHigOyfWz=vo6>SC}tJ%$m+F!;CZHUA_ocIvf)T(JpRVJJ*4tzaGqH zf#sUYnGRnYe@QNZzc>P9p9c$6EGCN{#wMyXGkK{lfc5c=HQML0mrP4Vfx@Ilc#c$;xarCbku$HVBKd2Efd8W7gC8h^8@9$W?OpzSzXrJQ8L{A3dfdz zMyg?Mnm6^yY81)a@5|ByXOrzG%VE|zMHs^ad9x1S`c^_&c89*i zFLFO8L!Bt#-XX?ZQSJbmDzcab%!VlEKA=<9Lh0iUR--hgXK5AGGklHTyozV2e&htM z>~)kO@3)qV2$Ka>S%veW4-|H_rKt)uqEa?}oL;n!#*_4ks$k5yC=>|dCq?z=DI!%z zNgFxWU8v?%6=@gvE8nAgaOy*5q+y5m$LR9*>2|5g(4nWwG#|_rPOP*$%9zmK`<4HB z$mB`h=&*$tQm3 z>++87k6ZsPD@dWFOme3H<_vfYstT9qI_#GoM&K#|3a9b+G{C19gDQL@ekf{77qVYg z7E*-ySwGFZCMm;=$OnzPJlQ{{Bi$&wJ-D|Vw(aqsp9dOpvgd6N`SxE8Suyt zxLSA(nWEMCAqv<=HP-Uf-822Dkl6Neod}NKQ&Um5wW9i8XL#>;9)>sty&UIkO2CGq!t5?!LFi!aau>cRF5?D8AMq9jJB+u){gR8E+ zCuQCEHs8(HLkC;DIlAa(9vFDstEcnc1C6B72nvAmQZZLCeWm}ds5?x(iOIOJP(juA zdokX~UA~wY8UnaIF!Fj2<|-h~UWD0=mC;F~kiKxe@>sp-u*Ya< znJhFXyIiM%d)CQwz6M9Dn%j{WLuA*iR3L9i4(~={?I->Vzc)z>cE^w6A z-$LBBD)imjIB9P^s#BtGgO2IIyTPI1Gur_duPgwGBg*eegsg{}hWUCE!y(}Ypqw2e zDXAxBvk0V8Sf_>ZoZU9(8@{vB`H9U-093J>Q5{MFWrw80AG8|Ih!YWmGEq+bYaGc* zluS~k^X&~<i^a?8(&vRJ06>#pM4;+0!&Qq2C&)#ZXPO0 zO!7ba<>XrQhUnleDY$dH#KcR@!90n}$%M9O=hd}CcuKeR`mBKb?ar@*Ov1#S{ zT}-ysN1%~B4m0S~z9pb@eQZSDuk{vJnBI6w#kT#9%rE=TYrRio1Zb56VC``k{p6`_;`V2OPDqWUQrwn8 z-TfoBCa#Sp?%b!Jr_tA|;&M_599uBE2IhI&LzM5>Lw7C-6a$uUN)C)6nKuniuKNV` 
z%Gi_4b~#M&jFvli&Y`!o1bMb`qP`E63C zt)bX6I+eq;6{)gI#>U2r!hw&?@xVCcIcrcs|7Vf>i4X^iE@3wa;0;kH!g*ZHGsRKs z`ptz2MD+nd1k6VN7G>QvcQU7tsZ8$E-&qE>0sy#+@Br+v3E=Um06UB?0TH%NW$-9! z6MuH20r4hv(_eCl{&pa*QgyJV*!lkuUbKOGR5Icf1PDC<@dZY%%dna&a)3&4_0%>S zV+2iTh{#gUl}|t``XbUx2axpT>w^b?PlK8iFopk*N)Vvx7rKz8VBBl$2O2`%o~MOA z0l|T0NdZWKgE3|ScLz!&X4d@O``hM5%OL zyCogktsBDGm?u^i7Hl)?2=2ct?#h7@|C9>TB7X+-_Dn9L=GGJ&e%g6eTm#iZ^9(sr zz~lvpk^L>aa_Z+2ou0DGh8Mt-E<+7~=-e{O8Kjuy=MG)Q4jCd3f^56b9xAcTLG`PC z=Fukq_jfNnoRxIF?O`$hguNrD#wa17Wocps+~A4P``-SW zt&kwWb0@5PbBO0NJ_`n#4Nrrk;0pE_ib-r$^bg$a4C$WU=GB7uxg5PHVH=H9-A1U*V8 zG%zM4%0nIS>AU#oeUW=39r;1uJa5f^ksCNPX3W{STVf9tQ0pkwu^cNzA(=6Oens~G zy8??zbV6;X1v;=dG2~a2VH`a|8@OAqi!ylU-XTp02XqRu9oRVNPT<~@6{;(=_PH=Y z@_eEC1#bEix{Z?E#5@f1+zNKmb_msIcB_sNi)?4xS`Z^gh-w@+&Mjo0>{GaJ*1$JG zwFM;yC5u$$KYE9IJ&%AZ+MGtO;nMNA({@J#r>1jN9a;B9w&SQu4|^s`vJ}m_fPC$V zM|vhWG^*9M_8_hIHKjX2M{RYZw!#GTvEDB4rDA79%L~5@`1*EH2}1iRuT$Cnu1-4S z*|%`jvX%+*;4ZCS^+!g~$7FKZA`FVHUSWS02&(gUausJEhYW&wLK$H)^8V)89Fu38 z+UGt1N+vWC?DmyDVT?gk6WsoJy`y}h!|?C_c(U;{)495P(#*;FaPDUY%DiH*9R1-? 
zdId@uh0jyY_gv1rbn0>#2<*ZCuE687mxn5hw-Xt{-4z?x22oNNG}inKbjeTk%pj!` zI0eed!$hGx8v4n3&75k{e$(F1ZxWA%^b8mrk2s0IL=q}BVsObqAGmFeW97JoDg4XX zulnhw<+o3+%|yZJif6xv3WnhG)?%*yQSS7H>_*>Ag5+-mLpbj+_O#|RVy|XmQm?X1 zS(EJ?zH;~&@V7uW_K1UbKDrCsm;nByv^2(nv(3_eNlqN=C*CnU?0j`vk*)>}Z&%Dg z^4>$=Q`^PxUt{|q$mm)3(w#>@+Fb9AaQ?bNj@QrL|K@M2^d{AWfauetjGWNA6D}=f zthz4w{xI5>BJ8J}6%83cX^lsO;LqoLxq>UeLi9Bhw(lPPF?Mt6 z;BzIvA!es8{zJor->;-1W)QK zwAuX)u_c6ISt(6Iq6gqESMe^8~N|98csLs*~m{J+6@WNkAA zeMA5OS`z%Yms(xl7`E{>@HzN*#Y&aGaI?my*BT9D;M7KjH$($`h*;R+UX>!#LxYQ) zku^7n#DC9EWY(5Cul!3MvMYt`eM3er%g@XNA4U$iToGr z&dF_F@PXqu>Ol!~%o@xQ%`<2}Mhum6_n$k!rN@7LspzA4PW+iaHTDUjDLd?`rFH~6 zo%J;}8GRg%E*_vQ=Mj%75X4Uy$C)2~K^@$0MQt1_kuOQ~>J_uZWxBt0okYr5|LLvG z9Us7TZ;`f>E8Iv*3Y7+l#{zsolhY)PQ-@Cn8OEU`dLPINKH}L{=G3sGOC%w-acys4 z;OtbDz2ixwN0!1XL6N1T7H+HsaeyW^=%IZFgi7_5yi=O~D+Qk73Z8vXEzcS@9O5YAjEo-N^jYuBCwK%e7 zk6*ooT5T2;^ zg>L987-Q}z5EEsY>kR#I_*)8z4cj&&ZI{8MC1!v!0{*t$hkspmkw=ctIg-+rq4sE@ zB(jyJy+fo&dy4KFr7dkU3PH)7pn*md(58tx!zWC^>rtA3qQau{h=T@u{@YXkXwwL|6vyIG03S4KBqU)?Li7d@HeE&An3f+-e*vn^$1Jf#XauWz26Q z8;CbjxofR1dV%^`;P!?cQ#ITQ&#U#Id-FKB-Tf}m+=LI$U1-?>DQuttm>uWP9SKxx zOd51T9sW2(Qk2@XQm+D%$VRGRsnSQ*R5&PrG^|i1Kfpe!tdB-D_j~RV;WK$jzQ`UO z2(*S`(Q9``G44q zSn#Y$leYrjsR-0ls)^K{{sGihe}_RnR2INXFR&j^6V70iADeuO?S#@BQU;6hL0$0D|*bbZIT@AdQGfOyaD}g zFz%nUwcLoSm9%5xBCeF5(R_^GAoB;DE>Zk*-J*ne>It4*QG18)6z2A^hDPz8r&QZw z+#{zRvtdI!pz20a8gMps(W;;dbD%;z@3Su$)?VcHgZc2f`*y>HGw@Xq*1kxIYd-~= zHzt(2ho`2cp?s@*jgK}&uV2I$FtCP3AZQa_AGd=rU>hf ztknLCEj$gUNv=wJv2QUWx4gU##5qgU`vDFBS`EmF%B38`Z^Z*+EUZ_|PVBVib{y(# z)pKYW2p112FK5unsuuh}9$q_J*hi?>Imb_Hn1)M2d|8l|)qm5v{RFR}nNUbW#v5#> zWKJ!v9|DzED-YB$B)OU#HJUG-#Xv{i76*29+ZB;_4yj%rn<<$d@93AK&glaivo^!Z z{{F=Xqp@<^UBH2`Sl~$C3EiT^O%rc-%^F3(AQ-&jc!Eb|>tHFzO4nKLtf{Jq9W%ff z_g{Y=4*i&qPVP78SE(r#PBOnXGqB3SzBY3o1+#rTjI*nMqi_Ub5LWmX2CgRZHUERd zcmm)!_f|#RMG^Puf!SrzO0ecQg|8IeJ`ZZ>V9#BAFsb*iyP5EY+mi1(X%E;act3-h zzd2h2J0F4jR%`d%9240k#fT?VxvJcPIWyW>X(1H~3>u}acOA4I>*&F+5wH5f1qax+ zS8c$!GRX&lEHQ8{>V1oGrh2slhs@ilg8_zi 
zr`VkXw?E2?$bg3tn+ql~_tSlSuF0Jl(~y0yl?gBDFynkT-V*0{soQMWgi(j#O|;P1 zb{|kbDKCcci7_1`G+i8PigryD7bhXe9LyR_JjJWuhgVU>yK>?RRLP2|BN1&x5Ydbr zL691IU$u zsb@3qf82&DAdb1G8np2NSzG12nKr(8q_zF0rofS6QaY7jc)(^|s#X3uC7~@&kzeo=!*6KtN4!)#w2V_~G0A^=S*n-2@vO*m;qj=I8y~E54^#=uuIfq3{&%B?ux8C!> z2=6shPD?YyVkoY$i8s3%m*qO(R$7R6r|WM`(Uv>LXy+pr}uu+V0{vtkF2(|&Obza#Zip9pH zt(*9kMcIJSkWe0Cg%{PT|j59!YjvMka}3&ar=nrnv--VB}Z)WSCsF_ zi}|YI{kY1h7<&hYoWaZ8oubY+lUs7_v-zbiuHc&M>wn^G+_lc1M%-LiR&`W<)g1mu z#peyJ{HSILr&?>8%kQ$Sb6$TkiHkbv7Z4e~j`d2`Eg$y%b$)C*t(Ajfb;OJntT!FN zG3>e}=3CjJR=>`pHbm*x)M6|BbAGOB@Jl{bl-o|a+VmSryjZt*y5YTUaE+=_sfeoW zxF4ltr0FflEs(kd$KypIc?#O_msy-p&x^8i(0AM|kcPN~Vpad-84OPtBn z7Sv(Bqn(B0n<3Y~{#)Hw$(PonswcOxLah%^`PCH}n+;!3O^aC46q)IP4THk|MiZ9t zP~zaHQLj*7q&8N^GLfwL#Z!6hxtc-7Qm~sU%cK&s3dk$EB`5J|zhD9=ZXIo)wYz2u zL9hQ2)n_!pu-?+7T88m7B5~aUGD7D#+3w9cNUHg!{OY8lA4C3sP!$fMX+|9NMB%OP zSGmziM&GZVF=LIb4AW$5xvcfRpgFK2@YSoKyw^D(vEuxzol|>11Krjjm#O-dPXT0v z^PUVa?pMk{bL)R&0Ke#f%|I`ZwMmi>bOiT#mo&B2I;;sgf7tcK*`x2msRxPfjT=;U z!mS>MTfE+LT^n_^Uhw{15hMxV-KEgaJ_bV?yU9xq&|-RJudqGzL5nF)w{SZ{?#v2cUnIk;EJOPwK!=qVz_SA~^HzVsz3i zm}mRW?bf8(@VS7D8;jq7z2eu-%y$C#T3b=J1u?ENAv|l~MER+w7U*jiPOXCztg?`m z@A+fcF3o|Pic_+H!b!{~bIev5sB{c^cXv@OM{zfMOG!N;9r}arj{Im~)v`KqBf_wu zl`D2<=3`0=%n#c5x91;zYc6-#C_9rXwpiyQ!WBFDdd9%-2BIA`KrhDM0f-sI(yjK> zy@FgkYu@zDs>g#)culb*bmUX;R#8EC%$IV?@vVs_ec?#e(?t>%(!^Nj8;|n&p`kVTfpNTSt zs~H1#LdGqeUsh7L*jl6&v`mHfT=vVjNh;8&VQR@~r7|B+Rp|mjA7SLE*L)3u*^#3K zxg=x=dau`678ZDysI7F&lex{Yq`9(P;bC)vw#7EKJ|U8)@%x86-Aac7oR7lozuM<} zLT!~<;uM@64;3rHK55)4n#s-Q;A}mQYqyafIa8o-#0`xrWp6w(t!>Xz!CCBqtI=9| z_gIcBLk3;vCFiuA5I*<4r~DaGRFphb6Rk|E_4bfh)O3 zWnq_=WhTmbp`cl2*36ruo(IS$M-@x{3(%7tSX3K}`YF3t-e_vrI{xyGHlXnY;XIr+ zfytP6nd>_3sAa%F$Dw1T`@hkEwZqYKj-Io<_;#?aDTqx2fXmyaJfequf>={WUNXnbC0VJ@p@_1?9 zT(-kh*7}fXQ#;5)R`ccehN?3p?F%bOPbN;R>M@P23}9AC6u%1kc7J`Q61L<|{O7@v zw)$vLH}5u?D8$rYwcrC5cCOJ^xXY9DD5ik+CQj>m4ZKpuN#0$mJ$Un*2N)NEW$R&T zFK1}=7w$5O%^heqc6RolzuiU-()%m3&MO_P{5D3QFI~BzV+8wukgr=&|CY 
zH}EFBsWMEKAMmoHp<^~~Ml!2`vpmsjZ32Bi-3Rf!0IuTr+I~Ui?Pmsm!Wkz(Ph)qR zH9}Ou{whVrr8F^279wkqLuWyL8;v52RY<|lc!-rflA6n$bCsui@Q2)^ydAdS#%G&6 z_gvL&TEuT*MeTT7{dXPh@r;qZ1R+Hik)9l=#BOq3S!nx4!1k4%d@isr(ztjIvx?f8 zXP9_!?#ahbgNX#v%2#SfKQS>=Y5cV2Y!n}A0#IcT#+4_+u?4Q6Pjo_YB)fPRXa*Bw zsa|-{Me;;)v@x7pNe?>W~3OmJJdNBi3egH1-EbX6vSFFdkgV`GXkU?z)PVh6N{cz9UT!Kc1}`fD`G(#is}eQOH$z*@LsPj5ZCeq! zHQea-9kO}DQQW=mPG&Q0vqr`^pcPT+`D4i?B*9?+6@XF9(r|WVnHw3?uH&~Uw~N9a zzoGS8>G7<+=gyeDXcWypH7cDB1?rxZ-jbN?R8n3{Sx>R?RFuVy;PAJ8VeBBJC;HO- zh2cCw0?98OS=p!uoT(|zn(Ns($_KWpGrh#kOjKA=i!~}~8Bo=qZCLT*ctB=={m5Id zU9S%x)w!Sj_8U^Fgus_5@b7cGck->$w3pPssuVjRA|;0c^O7IVb@bV9C9A%bcbmk- zyok+@27%z`y1P2nq<5UnftL?wpQRlYr+nB>Y&$TXrY~CD0wNl36PVXiy;Kdc%rXy? zHi!M6`v7oVcIEGR?>YTFz^fywLOsL#?zgB@o6OqpeJKUrt91H?x4pQzQt50(ar0FMK&h}sY;_Tde;W`G$@6Ip}FvphXpfb{j1RF2-dy?{|mc#j7=UE6@}EVqpV zcZxsZ)~^X2Hcp+Ob5mCU#Tx~;dhQUPD&av}WbbaxP_Oa{n{j!JoWnKAm`V_anlJJ- zB3;D08^CDJML=QG$BEy-fP6rplrXzdl_t|hVm0Z52G#;`!za=$w7Q5>7*y;Bbb@Mn zq4(qB2SC4O0#Ithi(x;1eG9P@z(Iqfs1^^1TIj0zbA1>Sd*+TG;X(ldeqnmiD`fV6 zf#N-j_V_)HlIaA%!G7uE&s)69nbTJ7W;DflFkt~n!{y}T=5~!2z@82uDz3D6HC8=t z_RdeBi3m?zrKV4gCZOE6N!&-^Q4?T&cN6EqS`!P-6FH`NJRPIl1^BR_szQ2}MR<72 z0Aw%-zxfgjcoL&Ow&0g6+Lu~{AFZPg_2}=-_*0Sj37swx<{U%kWbgOO4R!icC;%PI z?IGVIB7F4-j+grl=Z<||dPMP>mWLWhnA5h+jEuwImGGAglYgR{6UL(>u17A?-4vGt z%23315(aKLCcpe_>e)q$kKBc?a$kvXCZgxC`|E1t`V^FdPZuOkkk49ku#?h#Z5L$_ z`LcbL$?wnEGrE!O;m6&F+}K0zOeKwpHym!Io!#b_c)g7oHnubcbMazy_ zSLk{#t?ruwG(qrBPwCf!0PlEN%cO>^+}}+6x@W3I5rjkWC!Uybe^Q=&FIsFuCA-o4 z2dKJt)B0^kQLQQ)UYJZNeKD3Nbm64?>RxA7DgmO&mE&CgDKL(VsX`JWT!vs zSMwyBcq6MDfZdihda44D(wfrAk9(8xN~a{c}Wl@|?xSyU^ybOKy~ekHJ<)wgFt7 zV~f@9{nq%3*j6j^Su$2|fMoVD_$hjDKeL?Lk2C>6xczLn#{_V(F^W}mcdfFUT_oQ` z#`c#f$jjt6!J#P+`mMtaQmzJOSGMca1Lx6J7I@J@R6p5mUULJ|g{rV781xR^DXAM)WsTc2YNBj2fhTu}4M3~O04Pw1;wRH57G~Ed)y0m9LRNl?=;p2W zp_1?!pigyu+iLB#)4*Te8f04!<6U0Z(8u6*R%Ln;mU@u2;;T0=adz^JH;v+Yb8cjf zMptPry%+_|d?(OvilrJG3NgT%4a95YVVR2uu!oTNPFpL^;3B1gqR;jAzJ*cVpwVs3 
z1TXuiA$MKwQaI!zo=BQvDjz5{3%K5Zi)3?&sobTLPm-P|s0u-^&fmWdGtc)b#>M?1 zwnAHWSzY1TYx8WS`E8(+tPWru(!yL>Gj_e_aCC)BuV%a%EPI(!i1GCF|6ixA6ukZm zIlb#>JUvXSP9IPb*(797Jf&U%t>7f*)||cKld8VL5+VC9!1FBeo`>f7pg*$Ad$It_ z)xAI!97m_*{{Xgb?{-gFGPW5!o&a;vaEy@f5c%ahexqo{&?p8o{e=&ztM6}@)!U^W zK!!rO3!vC$T|DBJ1wIvEU0jM;|Eu(Y00HlPt*5gRW^jJLx&20lR}eIF?9LT%?6(7X zwExL{zFlTU#}a1%b^xFU0K$MxV^^GGNiC%yBR{0z23v%_qgZy@NpL@zKRLI|HdlZ2 zL9SIoOa6@_1ROa^%OEs04AGq+aD_>K)C3Jg6o7`yA;Ox;2GG$g2_?i4H%A>)YsEa$ z-}6-&063%ITlvdw$~;{JX&Mw{mvd^mPFxH&Myzv}MYSvNb1Q(yH+8`KgE%dFIjAr5 z;B%cpja5PPm?*QZBX~&k)x~M?2uk)K%3UO+H|~{1@)~!fwCZo_Gp=knH8-@_cK}Ff z7Q28-A8O6;EbKfwi+PS~S+E?LvC4JUZK|P3w86;#rAG-6eYj(&6>74y#n5|BGbZ4L z)v1%q(V=|wsFydlk5D>7)U8NNzWmnr8(4RgY86~JP6T4=!VGxW5p)g!?!cF^P-XP^ zN5{DB&Oe~aXx@qiVGfSDP1>|Vr#Nrr?16GVm>g;L_cwNZI@)G97tjpQAsB*=)m>78 za*v3{^EtWgvHx?INus9YQ%U(Ts=gMmVpI(;mI4)zd_s3)ipX}lKTxb@C*D#UEDBWa zLdAVQ!tyx5Pq_LA(-HMeJA?iM?9O=oj00`yQafuX7wJ*l43Gtw$+c9!9Ui)J?g%RG zu+nCz)OtNjA z6)piF30RF$a9s?Zbzezq_q~leKyb&e0TgCrKns;%g#l<#{PspMmSE+OC$nspd!X;< z@=)uCRXJQvUndj!Lx(N}C+oN6U2dsxyxfa^u@uNN8Km*9_LjVp*)`>?7nSt3hM@!* z*?on4^?EBXTVcHDm);?UM@vnreH3g(dBR?wt zyb*QBUp4=guX>QzF&#1f(}Awd%jJ`vdzUs-3na$#oR+2=#D%R19+#8Y@r(7@j#O_TiTVMa4MG zMBOpk8xu^#>u!-2(1A2G>_8U*#T$h4 zc^-N<3J>1^Uo--p_c-c2uT+E3`+h4}ZLsMUc631Oa+(4xMkr_v&Nyo=YpklkXXMG> zi;%)m+ICk)5ze3$(H5r?gMP`;WvUC=p0!x>8sNhK;)xC4${!}Y5|o5kpV^gY9h-|P zRk**k9XWLhlB*1t462gZokE3;efUC>O?u-%?$u#G=j`x0#Z7`L6LDBsa=%OIAS(+{ z8z^oxiMmRup8GW%;6MTYK?;Kd&Lk^g{p`cmkv!TnG3LKnEozCUZb~fgZpEToihz_oz|m zlhHaB@(a#sqOcdtnUP%?ajk~a76@3|{}84c9JlrX^(_KxX}-6#%4Czhl&BTp`J4Fq zLx-bwypBju@t)__no_znUwYsk`X%$8dzG{`UIFwK9MWX-#CEZSJYZ!l-}ODp@<~2{ zSGpH?BfSUd#nTzQooi$8qB+>$>B}YHvV@1>j^*C~$o?N$&hO%sZRw6hwmUz)uXU#U z->z3UY5~g+aE%A&6_{WlJwQ+bB+iQiI3YvdT$am+H%6V7J>x~&v=P@+y+W}t58CWc zyz!8?tZvH40pei;0K|s+L)7~OVRlEM3J6ocd)hiehkqZfs+m>=30cbk4+Ki1%pR5V zQS<;xENz;@cp=Ph&@fIb5rY8Chf<>1im zlQo=uVR*U7tqOy6t0jitPUQGByRo?K+m@Yz2qoiW4drek0~po0X=c~H&u^#7uO 
z!x%z@@KM5SxcKP%ElTb4^AqS=CYj_Ju`qMQbc|pDxx)@K7TVRAFW7VUl@L>yRr}pR zoB}Z^HQc#(Z^xCj&t_55?n6=n zLn2X@wLa|nx$Q|*qf(xQm>aquFIWIt0M_*pk@L*kL0`jijoORM!KaxPzGC247v1X4D{b{}NzRkAg5u0-|yi;@NwWPYhae93)AA99?q2oi!Wn ziS={e;Pzt>pI@`MAYW87zjpGD?V25^6RPlciM81L8HWQmiZR3QP@z~wLwp5x1lIJ9 zKy-1%Co4j+FVGlM;z3{X%4#%;qIbNn{_0M{xNvt8+-{8%uvo&29U+Mpq;?b#MJQyV z3cueUuU>2f!?OPN*<+KDP_=I(-KvqpxgNbkE@t!>ME^+ERt=^lyP9T+1qf8&Up%<` z@8iLHz&}@tndnD9=OvGg!r41fbBx9JJnrB4+J9Fx(l(xW#mG36$`(XJrZ%7EPJM?t zJ}M95+~BJzSgx`^V(yAQW7WVtk8rrj$AiB19@`zx=LmlP#6A!PCKfXJwVsS9@!3XGK53_+-RRwEe?{?-`vp0-b>>I7-gIp%X@S0q`udO2`hzUM7kgu&!|S9 z%lsOiH9tngkok`U4;b5e;(V3mi{MQ`6FI%%FVNq8RG_p$*FmAPp_CP{C)u)t)+>wu3HnZvo8`=O>lKH8@+Fo~Vnw>3eoB zLogZw_=e&-*BcUKNOwUmfBH}nYW@11mM!{f_&|5)K#Lp z7ozR#dIO5TDy>ImAJwg5JW~GT_4XW_G%zl z67Q42qcmB*@=!iF){ZVn5BQ>Vp-vSu+j>l;d{8n|GOqgGR};%){=34?UvF#ORMaIJ#*#}?~Jy3aBo*{ zpIv~aH^7AeAv>S3G(ksgoVkga#%JtpeyKR6b56tql&^(+R4X_SQBlfJB@SEqssM?v%y-MB#?X9{YXv zRDx{zbwMw(P1BnGHEe$8^MEK{)o-dyLR1QZraJ5Gd7}SkdJfjeCxe3>Wqjw&_l!W^A$r?OOB5G3C-wuC){3@+tOhz2EZs=SC!}wr~fpC-`IUqbus2q zNO<7Tk^K0+!A`==ES{@10c_RJ(L`(&+-`-w+w%tgR(bo^o{~Y)uYDHKR{!Wb&}0Tf zxAw}I*#^&sWgs8JVg&c&ECp*Znta3&apY4>&Ue))N`0G%8xfPS0n=1ebgV0+_6Zp- zmlJ-x=BA%3f}~4+Ib;&L!Rff{&;J(}d??BkjpqQ7wi|U7B_W`LA#>Vl8qvn@@e%em5zCoSd`Lx;1ACcccL3m8MhnBC+n1Qb-FY@c@tLl4Z zNM;s5Smgp{7l;KD8M&hY*w1Q092j%}%)>-9P?AmA6O0)g1Z|TOUg&s>*QWWc_V7eM zh8GB`kW*rY^0E5UPu8z|&m9CHWvzBd5=^@v8(0wF;hJ_aci01%9fAnjQY|>3(*Oia zgKBniwdT!kV3~5c%z9VgZxab9a+TrU&Zr>zLa%j6jFXfY9~2}f{-AUOf{tkg-d_G5 z5rv`QK!&oTz5n_)4g2Gg^1co8U9x}!_h9BX`0V^99i7Q`wF^lr_m0fQUJRR8C! 
z%C7kXeDFMZW3?vswVFsZR!vPIfsbav1GS*6V4m$yNO<7wMI2I^wR|f)18A<(Cl-m{ zg0&RNEAcEaYh?^Erk?t21XBRheK}HC5B=&;A;yPV44NeKMT188hy51bCQbdBB^{4w z=nuRt7jA-aOMJ`_QK_CnA5+i-aUvqqR)%M}UC`#%y8FT5q&;bTXq^(@_^v8qf$|T} z3A|L2g@GAHjOmM>zlU>q;S-?GRQv^=dq{T?2q5*)F;8ZHZk2GF^wL>BsI1QVkaHT; zl%~FaSI7X;fw9RzUTezfy22 zqgTS<=UzkZTnn4Zzthz%G!OxC;mJG2noHai#)3#41IU^ywExtbQ9F6-4HPsjBm9ZU zTH+bc80a2jogBbSghuNAirTEwQeb9h(x70aV+YYqJX)iM+m2+X7O^Pivs3% z(>1P@c>%iEG!B_(lB0qbyPr)TL46IiCfSlEpG|szBxx+&#Axi;=#_?oVEyCf##vX! zs#yOQl$6d+aZ8n}D4~`lsNu= z)8KT!#~vMIKdWxb2L4OlAQ@m&yxuVTxc3qOahuo%u15P_o!X1WM=sS2LZI63;gR zA&~3r^I$r291bxwQ4C9=cUdtFbOR~z4`h|3pP%I6!ud1SxzIl|wOPvL=;dEfR+mmL zDGq>VFL_@*OnPPn9N>rU4wfY;)N{=6D}077?!k18OjX0W`>l|Y=Wd=B z5i}>Q2-M@plL*Ld3)C~Bz&%i7zqh!-nz-`HrdONn4H_+fRz?q)8h`buwk7?F{lTnL z$GwSPJ?g9r!e7L-8ThzNUUg;?))LlwxQINqkcFWpDw#!th+JyiY219e{%q#&p+}Mq zUoJnXdmzPq&F)ci@saQ-p!Py6{y=m|3=8J}{KyTR2?(U`VE3Elee^Jbnc3+Mv3C6L zTtX7|KO?6#^`GDkL>voTd9{5hTan(?y`AAENoe3E-f-tc?#VB_wt2()&`p*1OORT* zb78>mzB+%ozV$Qy0t}s%Uuh0s!~gKHLUXU;`8Vk0lxbVf`}fX!Zu9ohRmIecB1Y`T zTaUbF=7ei)8wU+5;6awF8XnY13j`6yOpjjm|H9A!d)3-n3VsqE+&G?WR`4mB;%1^A zROf3uh#I#|iS=yRs<`IhxP}T^ddKSw-MhKYI`NGh-9PSBgV|KW@6y*Vyk`BNB0+ZI zyNfp+C`4`frg-#Qt3ydo?n&0ANkl;Y!C^nTqi^<0n)wOAR8roTL2X)`F*X>I@)a{L z(e|~SyuTwep&3qi>EqOf&V@O_F_DvDy{YL{>yGh{03bM}7~=f*ATfq)t2hlj#+DM! 
zNtPP7;*9wkF3&@mBk1U5{;{Z91PjOCUsKzO5LbOUO?$+@UQfMXvgkMl{Bsseq7KaA zcOpU3ulo<4+{18Gx;-0I26SQv&<@7R`I5OEuM^sP@!KYC;yCDO zm?N@!B!YZQq(H*T;I{Np(FG*IM36)~l8uTPKs7@T;6W2+01=1BKWT_-l`$<_ouFc0 z*^30j^nL&ZwH;iZDa3*E3+MqBs)TjAiM+L?h@I5-S4zy=Oi`oOo*-R{Lw73A1>Orm zR+lOx#2U{N(v$};ehkggT}SkQuLV@;r$5yGU2|#FKgR|AP~USBT~uZ$&MQG1_f4?U ze~dBM{jS(wV+i)MEDg8Kc*AudDKolEGhVb+T)b+0RX8zp`p1-NA0KphWUB5^kD(tu$zk@N2U1yT$Jc zt~6Ukc5c@Ht!n^CTYa(s4&B^F{ky@~bKvoS&e-HV$t^7t9N2%;#ycwV9dW=Lgu4OI z=a^M?lVUeXG>zyksassE*$ZX-{26I{A-_N#$lWZ?ypo~d=y$UCEz&*taIU{Py)ZXU zQlES=*Z=Fa=BK*E(o#r$hD;P}CZ&-*v&h+h2|=Of)wnv2j5+u|bep9B*r`2QgOd!~ z*;^#tn9~!k!?x#VUJM&HJXN3P+q|JLzM+9`v^Jr={FJSha0h224B6T!e4Ut~BY26k zd<^_f?{pjj&v54DBa9+Jbs71^X;-Nu{(}s67%0>Rd9(}TKg`Qt0I#HCpd)h$Yz#4y z7_aPS=Qa2a!F}Rg#&?KIk-yLC*BX48a6T^LfN=GJuYJ5GIu{mX?`(7tWBR6*--H&D z^&Gn`8mRMIOAW>nW?VF9sNHU$fuycf`U#ji%7_K^kdxG^g>esK&>O2G;qph~#;j`U z3!|chD|4qYyZYBY6in;8{>eXJ-mK3|v^P^#5NGu#jcbr@gp1fyJ?`kxr45hz>S2J1JfHR^aYZx5^}64 z*Uu|ss8Yy7Ug&3Od`UmLhrX*Pl;dr}9I>z03Ryq{^!K4f!)TaJ=x>@uF1~SnVqeq$ zVlnxKtvBR?a%J_OLJu=uP|7u!X)r_BUB7*^&BNq}JjAgx1Gvb8Ue)+Q*Uh?1PhJFM zn2hJ5Wd%|A2qU!TlU$1*HAn%-%c)oDH8-*GfeGs~1i1uyir%IfcoKNt02Ns^vzM@$ zeu!qZOL~DA3?d@i1Aj|mGcb+rd^4(0-ys4%^vN82Wo#{ z4ax}{hK9VSSNX5nqJb1F$2PlDE7qpu$_xpEY-=foakoMll8+o?0G@$7H7+U!Y+``# z@3jx3${hw~U%LeXIQv(khKVEjzg?_j?Z0wQvBi1j^>Nnsa3I{`?zmC=MD8Ld6$4K< zkv3yll3!9NCl&#U!Ef*^%F>;Y;8{%h@1D7YYe{tmAK+YoqGVB9s9fo>=^3D!Y3 zlj3g9UJmYx^_6f0l!r7$+#D-^G>sZ-Cm_rcRtG%G!n#Z7h1&c?^c-qV{tEv?EKyk| zNfmm&k{TiVln;cu2}}XS)cjk9;9CiN*DMwn9vBE!2AKG1pYLy;>h5>$iO(pA>$))K z#aawIG!1C%wdR5Yj&A{3pmw(eZq~1LUE0jpj63?zU|{;_cO0j4AKC=pR_9HrTVBIo zc80HYWUL8DAoMOK?=rNtRN4x`X7&oie9u1$EC%PMIja1yFdg%_6sIde=Z`5PO|v7z z6?H^!sT(LL??&@`YF%OqMg@rOH)LOz-tga#&w8HveM>Bz(1--nua_Pp-d}iOrXE3o zvD;}+ZARP@W{>0dWw1+=)&S!2P{aA6pi14O2JS*@*{g(2N6>Bl?;cJ056FCa@OW1E}Fw*tyFIIklI)T-# zwbK}oBRwTn_py#r>}-i3`Fjf1O>&iSI8DG?gEow=lA8Vid`1W(vV{Ei(@+57z}QnrnqZ zhByF>9q-J7hXddj1GQxy*!4pDE}D&WXv;;sXEIlr6zItayW%8+h(o5JfVMD#sS(ONCF)oOVH~ 
zt+fQCM(u~llKQ-x%ntz|?5BTMu(jsAg)NxZMc4i3av8A+D+@W0He{5YOik9k7Yo#+ zRi@|uzA_i92?{h!g;^7RxZ7Htsi%N{^k&S6!$QGEF-aL6wI;qxP77C| zv?^E|1)4?|&->xNwfIrESF?nvMky(K_nl*>{zjf~u-bL^(qE|E!THJPm_+s$VPFeG z4E3nPMR})f)G3u*i!gg5yp)AT$+_&aNowJij$rrq({Hd^zejd&d|4_n_t}X7m)?IK z%nDN>xt-n5HZeEfi&6$ehXq2?BZi-B1;CU;r9c5#B=&Yb`lgKsc5lgYg$O4%%I~@C z;=cb==IMJ{JHj@RRNP;(;?f{+GTynV?5!uNx55RvPpDsAiw(;sTJLp0j(s*l8};UJE>rS}Z7M-)$AGoP?q1f)D4%xyP9= zQTEQ9b=Nci1AFkqR}lyuGO=|Gv;d%Lgn|u3T9rc8P0MGpno>xVk5r_oV4_D*L};n@ z_VOmW3VzUY)B=824r{BXx}`{zx~FX^b%H(*im8&3GjUXnRLltemvi7edLspZRRxeGRax6l7*$Xn=mE?lAyOm&MtQmmV?JOsvy zk6AlD8D_}UKSZ(iT|R!|3qV{cWs4jPFD3BgR__aMR8O9Z9-(Zsw9+LfoR5KiW@yO3 z?t3drm51~a=T1CuNb*hcOsSryQ*5bwiojR=Yj;vLz{cKu9Ec~$qf#$?)|E4~v2+Ga z&dkI8oPS)Yo8L|h<|N^MdqGsa3JcXTqQVunwx!>a_B+uM@bCUw@h!lRL-=%C&wFPf z$`{q)dZK%&HFSlWk3^vqG02Da`lk5*u2_p$NnZTWO2W|iF+ftI6%$@v`Kr7X@Wad% zV%yFfT9tmM7dvo#csd`^_-iSglmM$YIdQo8L$>6QJ>VM@-P7WqDmpG9Hy_N#djF%m zC`{dg=t)F;lon3fb!!xO<6h_jMe@&mw*RikrYQ?8sIVC|=Nu!NP79{n92;CfD2d%GIy6@4Gk;lV&H?320i?n7ZKk0_QLS{%_B z*+-f06|(ZDZKc57HRQk&CiXq}yxj{}j}p>$)^KWVGFBOhAF-=b_aui`pFU&@oer3->sUO@LuR#;PtOmow4me#v2r3IG2`tLW1h*Ck{$h%NSm0RG^XQ(8}@m z@Hj(-Xtevj1kD0&w>3DhJ}Q2M3>MFK-ZOQpPp^J{rM=YNR5@zBaa@QcT%?B%VO}G9 z9PbRtkdc^6qa+y`)04UnKPqC_LPQzYj@;qm={D?**Jp?%d#ywAkKH_!7O)Wl}+F6fA)g=2ZUQhJVa!9+?zp#8gX(Q znjnAQ{GZebPjSq{!J95_4-mkL1WS}#X1nd!+!FgYVX=j%5h4SLUOzvYX`lz~fySRw zIo+t2I&w-rIb`{O8VQduRDQ#-nyp^EQNRj}{>M6(+uC2pcFg=RFV?(MgQGQ)6fK;Z z=2>-!$7NG!3?($P%phkAILKP4dO;f_rL~98&;6EPB?CuN3u$34jcfd$+rJ#SdYmTW zJ~oElAOz!lWXLCZqgQR-6-tXAKQOtrNYRz(Y| zC|b(oo`d7A`&6dwZ#Eo0_7v{M%Fr<90|^ESfQe!;gwsdlVj7bz+-b}5^2JU|X<)LE zrdoQPq;Uj`-|Y}LDOyV73^M6H#87T4hIijQ_THul8@L;TJPn^h$px3IWQXTLzKRu) zND{mME{bg0Kc{_J_%a=PQB@Z`gV$2Z&;Q6kW~ zQ_9&`_7Yq4obhF4D}t{z@Nvvm=?2~L9+0ODyLlOg979vmNEIzZ~@7CK$dgxKKsszF4K%Iki%R=e^!u1Te0{fussjQmk6)wz#53Sf;oQ*ILMKNam$=&U5R$xPDEWmJoDP_}cn(ofRe%+srN z4;di8w-m$4G!a_7B8N!33~-!`T<^0N@y30)!KIv~FB#E&eApym5-)4^t zSLTF zv@soy+2FAjC&|(nwh`KdcuI>KfiZIEHUHcx^;OzM#S_P4VWrBUCe|S7He!86A+E!` 
zg&XXP{G;;zU`!Dpyo{b8JuM-T!z~!uUA4UV>a74e&^csN!9#tw|;9`3VvVglIhz zGS%THs*M_jLUWUKu$PWwlQ_>$-#H zcY%PJvAK$+hxMC6;T^8CGhLmB9RsiN<4>=Nh+to3p-<7rNZ_@*L)wdKKQ>_;{jp_$ z@nkD!YCdHodWjp`_aN=zyn61J#N;LRP2K< zMNDlynLB+1slkZwIl1rMcgxH{3jlY*EC1|={_YNPF-^G)NGMV<_>(rD*<{q#mW!}G zK>-pU{mcEOjs04`dWdVcFPLqHx#X~mV)G>iW%Exa#P~75dsR!(ZU3&=@4^@ z|0ut%137GOb_)PVV3Vv&)VdOX%{ zjJHyGgX1hUPO>z4Vp~e(7d#o$;-n6H%t32$sPi}DY$=OgLn5&4N=zSKAc=MoiI z6zi^x3sn$RtB_x$-i*VHL1^Iz@CwHoj-8ZX9xJ#r51voocCvY-^}QW?x{Yj~AaxH~ zz>*%r0YI_ZtT&YC0z^Xrg>PBF8pz=;li?;99-xNrIOy&JK-aCu|MxRO(F9}gvBE@_ zVl#HCJK5Z;c)hk1F<1v+6v%&-p9(#f(9ZxtxW{M&4I_&u(?z*G zY(P#rodE#ClKl(R35m{(OhOndsbxz_@cP%H@g>Fxg)D5k;SFBkaiJH??G-hdZSq~F z0KJ64q@BM7bT88zn<;i-A$Q;WCkjtQ%4t*G>WOOG$xo~xTKhHrmJ~?k%fZyYhEBWF@*Trq5 z9U>YpJ&g%NDQ~`?l*ionbKl`$^!Rx~D~5=~$n7AJ43zwgi+|=6YEI5;MH%Ru_B8E@ zSq2kNA27rpx7lAXmMrOp5{C zG_)8rSN6uJU5p6B_K_>rB?tOZ@?P|@Y@XlW%^X^9YBpDTFqEq{li~kek?V9CW@M1e zrpJ$s{X>P}cLeq|H~~Bn`7=3yGS1>_Pc(vG+f;ny_v!H)@h=f^;TlnPx(4xB`NUb3~%3aUsGMc_(=lF!Ox{;_w>Il z5olvLymCTHtf62_aQM7V>`h6@TsD6@#|FB$xSEhi)Fs@F-4n!|4Veq-tvz;yf3@e< zXWvt@bzihC+E>tJ#&;-)XRjD7+=10CHp%sW21}WZXQuFLr4NDOXDm)x#~(SzZDM>A z>#HEVO#Wq|ksxiR)A+N#r@P@-m3E#DU{>?(UxTGUFE|jH!S9;%F_IGAvDc9`&Gp+} zOBftb6PorxR|tdff9oq3aQ1mh-rJ00_L2@>B6mxDPla};?7zJWz(-f(_D-D=?qc&` z6Cx8ppPn>uH!90bg|&i=4cC{pU4GSeFp9M0dVHkg{tmOoaBr_ta?)Y{+gx(wCV2qc z?YZ>G3&utoGp_OC^+oYT|BsD5#DA{$Zxgwjx_NGo3p#9N++?qZooVN+ksb;0>IdF{ zU;}-1Vu}Hf{p74EJMhBCctgLYnKpOBfU#oQgPzSqjHK`#{I1v?(yJu}x&w1POpjv{D<6pfTEL0vl(=3Z!I2rwX88&j<)KbPKRD~M1Px6Y?s+0 z!S@l$8`nGlzJ9~2Zzql1SQhEh0@nx*16kj%c+ijPqCG$ZasAPc?Dqf+SEQs;MSpSoNzT*10)iVlGiZ(jLZY;a8B1#;L}Lc;#* zc+3=_@!y}K?v;+_LHvt>*a5I{;ggOhX^Yt*y2(%_=kYUg%LD|j-M7JWCGudiG3XB9 zq_EeHE{iZo~?NKUZQ$uz&GUwy#qeZ--VF21j$6PAe|K!!sLOs2Hlnqk2erk?^YL{>8%3?D@ zqY6H)YDK(KcLin;)Im_>doHBCI6Ku>^?aezDrMAxtA_ZC09NS>`sLBz5D2133iI6+Szw?5j8na{B%*CAoPNj?D89ms=={+9R((FMIU^~GyksoyPoaEs zX|5(dsK$4RCmYg6gmT%esVSE&yw)8t{`Nv*KM(Z_(;db-pEqUAarm10mvRJV8Z%FZ zFv9-_nIAZ!Vl#|~x|tq*nR^E=t8gu9VLv#x3mWX%Jr!z%?fme;t>UO#$B8NtmN~_f 
zaZi4r2bawdMjr7Y5nD`)4fS5zAl?2e#;?27mI~ed;u_f;G+|O>>EMebau%AM>70O;DU18q zYaG^;61`yY@{Y?-Y`|E9Uo6ejVfE)t=a#Zz_QKVJOIKPUVA;4xh{GE^;qW%s8z>K5 zmRV?hoAoK3g`wb1VSNFV6F&>{clDVjXpewcz+jOY>exc;JtHZWhI4|yd+ixzsr%BS zh1v%8U$n(&tQ1Y~O?ghWEYa*Fd(U1`j!QGc#;4e8YQG_k*_jGX;6IUcQ}Fw|eSCc2 zydS8|lsqN2F|sD1|ALU$A{Vm{IqZsTbFge~&)7gef(t(~<^x~zla@x^aEw$!O^ZKq z(i!s4^Z3m??3!cJ+X8ooJvBbqf9W%4l% zb@NKlACm4Yq7>Ul*Z^MmC4_g0saZPKf z GVYwl9NJZM#k>M3|Ov7taqcOkvrbvjH%&8iWx1IpIEi8o~|v`5Roi0E1H;Jd>A zg)aZLws}(UVU|#4q31UTI>7diC}%J%_Bdqp@723V)TNONFrAdHV!+uHdyRK&XOFe?C$) zeYSpVXd+a@l}*DKyPt-|!lS!;WGX&7qE57AOOoDH^KRvD3)K%m$#5{p*L^P<8+wyJQSDb};9jg!|7 zAI`6dLq*`|#0;bgNrAYQX_iev@1Q7S5el1eGoy#zm~u{shQxA-U#{Eslr=~z{9oZ= z)2E`u6>Tozs!zMpHiV@N0~C4F;qy07QGu^LcAsLsP|2PpR(mBgGhxNO;syl|mV+O$_}S-=8{h^P|zy6fKm|S4v8o`#pSd zRxa-64Q(yicTe1CHJD&%3v@)v>WklC$Y(^QYVMBLINs44NK6>{TMV zi9^m!uc#-_fxhPABDX-)Z>Ut{v?qJmLuLdY^pP;1*h1HA^=(}kJSK;j1VaDqu3TdS zPx_Ewn(gkbOxBgHPCgvVKOHE4f&MhUkQu1o zQV1q?j*xmyAbnTA83ZbZNt2?047#!?>53Vw=CP536_g>FKc!}NG8i#b9utD5e%jM-mf&I{DLjqauze3~gCnVDHa8@v~8-rqI=768^BJE!A__Mlv_5BiAt z0;tnGAGMG$-}1hRJ)))ISNRMR$#5(BPI|n|u&)q6uDkGi(NWDNQBvi7?n_YU5d%EN z>173Q15^k%G0~$U^uOwmheUtnD{%uuV$U4d=p>nK@Tcz7vfuzG1N&ZdS8IWNB#2bs z5#rDf)Yh?)o!nXFr~gU1hAhs{uC;&z0K{nBTS=RB!a9Mz`W1WCa|t0CJ>;ZwKlZnx z;i}4<&%?(8ivlzI2P(8Wz{BsLqK&Jj*1qY1%H?LthYZ#2DQmR5gl}_miZ?S;E9Z41 z)o*tCVxzl3$B)3?r<6BG%*IHKS%(d6CWgF}t%Bqbp}k~tT<(3(WPBR|obq2w&1<8V zP~v{58!E3g&Yxn-ek|{?B&omYKPoX(gi1+?m6MCa@1`;n5^MbhZOp~GbHH;JAU4@i zPyJo5a#3QXiMU;Xl>00)*3RH@=ttHdvw^n8$9*!o<^CA_jnZe-)ALQt-CzXEr(o^- zD+y&n?K({peP4MWaBcyM!j4M004B{3-gnYIUSw`U8{zWJb=x5DHaYQ75Fv%Wtww{- zmb3^IACvtQM5QAWl-?Xh6XkfzLN}_vk)tLdccZ;UhzSsh>_RRUl5;d>W+R-XZp1dq zAnf?TFImJfjHTu%cmOtQGydtX|C-7{cXi8&Ns1d@N|MliHk7X_311DzMK{xf5PRUb z>fL8v=&$B91s|cjY82?seg&EbC$qPi3`KmJWJu$D!G@7&!G1rPT=ydE*w^Roq>9hf z*St=h7N6}gGn+T1_D2?hob7j}&$90L;%coYv4>r0=|ni+^1|}>eFmQ2RNPu^;20QV zYl!cksX81X)zUjVM?3pbyC5l`E+i5U<(mAYxw{atI6s7_<+Dx+`(8n6wlk0zsAF?@%uP1!Nlo_H 
z$3tv8eC;;1AFbgJqYG{{x)T~nlBt$Tz7u{RJru~mtYN?OPxU%;&K7z0J`jHO zqSgws+vaGD(J(aZqpqy)kMZ%YzUgcGk^snhJPD?WIMmlm`4%xPR#~4!TNgR`*~?ST zl-;muYQpz+H=w{N2?DQfxrg%=M{VBV@c?6sGvc zmQO^8pT@}5zU-;QjHTr1utl!2Cx~sqSm-1;&)L1Si{JY<>C49r$2RDW?wHb~Y02V6 ztViZ=$Cr$|b^29+bU0eXVP7AnyJHu+nS}Q@k-C(9UB76eL8~HJM1Xk)_XndZziI%< zx*rliHaGp>v^RobbUMWS>{tI2cZmv*sNjWe5GQm)RP$ZW-PglO??*){zoE~OVviUI zYDB+tTy7+GrHmf-rg74Fbs#zz)4^I-EWD3t@e}r;F;YQWYAk?6nROwZC0P%GTPP5R zZC3_#SEh(?2jQPZO^#g2B!uEp)Z!1ET-#{sUOFdjU)l5_9B{FfKkGc;3VxpPreQ|DUX|{cez}CTZN2Y*h zwOIzBXKufiECMR!BrLd7p^+X=@%eev9>LO7JLpWee0A_ICxD)hz!1OCKNA^g4;1Fj z@Qh>_#=e}$6*92sJM63ZX6(KGgUpoLiyC~aUdDM=3aP@gk)*w?rR9vQ1ZS8jx`#WE zGl}l#tz7H4yt|kEeDVwY{#;ohhD617TP3Stv(p)N`=*;eWIV{ZaQR6qP@S*pyl$&4 zPhkvHiquVq;aUeqJHQODnv+Ot>j^S>Pg$pZtXsIi>D=JR?kM_^;NP(eJzDzc7KrO} zgn1YC=M4@iVzdt}hJqax3Ccn~D7qu{a|?TKV!s)tFhPzJ)Ilh<)K`!c7QeKl(M%~k?iUO z@cn)}dsFvL^tQvi<=vbgOB1v?-Br0oAm8-mx%@Pgih;j`pMBR?Xa+CosJ|!g;d-VX z8L`0r;IJk&YzYZ)KO4K;=KMZsJDP>S^anG_+C$KXbTIJC64*&&e z%lcG41>(cd zWbBkWWSQT3MT8w3B%Umgou?VgwNd(%mvGK|0_c&)`j_6+^l+yf|3m%)GS(=~XzB0t53WYe=&fCAX%#8)3cK zn=z)Ax-4>L_WeQHeqYlV@LWE~!UR}W?}5KXe~{|lt<@1Q*C{jp z+Z(RCR=F&Ao>U77;I+!zju|uR{J7Y&{JQ853tXR1pa<#HIMtT}k`ORt0(@d;dgUC; zDXbt+zmEk+6-*C`?uI)idjf^&GD>(6InAfXETl|Z2pq~7gC4+=5YPad%9l+9wgY@I z{}zEm9DFjyZY1b#c~Q!~#!Z30rs^Fd`CFEhmG^;a5KK|-gx`0x?Rf6a^XCoew>)DH z!V9Yd4;D!*24`_j&pCYFR^%7Su=)LB&|#iYGRAB@xktQU{|jVYfmISt$1kiCKcEF~ z^}7-JGs?Vel53e)4bhXQ)+C#|I}V6(~!gfODu(aD1{uW4iLyp0p3K9 zQ9D2989C4?NA1GIA7P)7>-$*g;w1@Nw4kNzU3ukl%86Evvnqu_^ZmGl_`iC@^Ua$# zXN2;)k3gc4PeJFgVjYM^P$~e&sGqqhEa(R4p$D_1Xc+x7vUjCrmBRu_RTJemMCXa7 zPdj>WLRC-{9c82+oJ#wXBlL*xJMr$rar7ec2E0`fv&4Sa+&|{!;GKc<%J91M$4||q zai-2S+SW;!>$|9;mbIFfuE(|FUcx@LjC>*QsSC~e3-N(4tsQ(A*heK!S%E~Kd*T1s zkPDv31LfUnq4e3;@qTX`94Dxl5nI&d1xr`cA?2u0Kf4%8)Jk01c@L*zt}puPXNW?t zo}c2SggLnzzR?5P$P6tkqXn#Z{cevd#%Hu%xQL^^MgmQRPRB>uZ$v#tY{Q0d5!}<1 z(kh^DFwc6Rj`1rEqeIyR`cnsGdCK=f>5`l~b@c4R+HUi+n2~zILy$VtvjDiomUUck zrxR8F<1{%Wwgc0hb$@Fm?I6g;ywP4?P^#r3i4fBp9E4Pk-ZP-icN&-4gmvSnl(NaM 
zqw2ZbGQvzYyB8c24P5;0ZcA6;WnSvsHC#VY&l)Xw82dyb@T1eMEsijB0x>2N|}z$eZ>{%7A0=2b`LD@mtP zcvgDYA>bwqjKX4tD5LT|G@f++;g;(1Ii}8M9%8Nqo)kr(6tB;bc5^U4DRcJFpn>2y zO9lwn@3SN=;+f@TfI{F&5#5rl%_*d*lKjD_X`=4fSP)obNvi+(nhmMhz|O6~#;g(k zN{MM~^vG?>^yUI~P9Sz|g#mYiaD*&?uv_%qAV zu~b))_vR(oPzJFvq2@f@hbAd+jD~L?_|bIR*A zZES&{R`+Yn!0Cq2b3v-AxN%Hr0WF=E%@38IlRVI7550wTgP?LM7BNu9@ZoPJZDJ$X zpas1o)lI?=Nu33om_X9$9dP=ob$AX=1N=y~T7y+M#oij)>i!9o;7+zwoCe&2z?f#A zpL^HVto<-A;IRUSKI?Y4{nvo zhOl24jYyRH)}`210MPp`K%dwfk(Yr1eW(1|31)T_INcVOt4ekoMCJLNeoh52j!31_ zzejo#mVb(>-m8VU%!P^4VG0Qz|WCfy?%IpLQ-{h~2No z*s1WmV*`@mT@1K=`UpoQq(`AQ9Y^Za5yo^oP(NL>e+BIa5on0$g90V0+VF`i(`Wbp z=5$Qam~z9SH-{V+jN-6jcI41j&Z1~o?GkorRyKpJP3sg6E38`x;$NevkQjMbQb6Ft zX6O+R;)IW9) z)UYZ0-dq&EZgQL3KAz`r-_3Zh`B0;XDf)<%(cj;{Df1K9aw4K^tp61(p_l(&a4QsF~?JVl!tEd3SxJFgSz&~(}?A>d4z+dJ02AM#DH zyFb+rqOCfUxE;_M{#`Nu3_iC?8}LtpYa3hrZVCADrJ?Y8*ROv~%K6|{_F1=|U9uuf zOh*1lb(o<13@|tDrtI(M7ElKr)oeF5!6Po1|KNVhKw5T_^;?9~SGra@>m-$LrKzNx z6*rNCJ#O}9rs*u_blh;gmZ^?f+!Gk-@(ZEheGB}kU{KFl_JvwKH4GT1b=z2`JTW$M zf?{_>NCSz|rMyaFXuP)?lN#i!Uu5wu>b@l4=nqP_6Shdn-k)nk*mSu)A042rOk4bKH?Uoz0 zWaN$%wv&>QreT!dKZkv3viV{URN$4h$_)f+<^bU9%|B z@_w-`&meG#Vvn4vY_kP?O-QyHkWz^6P|Q^3JT&&iW~tEtJM?5M<8Hm_sAn^%Mpp93 zIW0BRjL`7lE1}@G+e@lm;`Sb`omk*BMlP`>f8E!ebux4Ocf~K=Kz8vb+Dwf~r1(R+ zgzWbDx%ZUWHhs!d-hc-si;cuI<_|pZ^9uw_Cg}Hag&G5BH{~rgrO5ytiN9Cm<8RZF zQQ$e^ua0}l^Q(TZF-pDtNe4BiUBE{wY)-2@jF@pR_rpESLcWqK_723rJG zB{WZHpR)rPi3|grBh#UHNw5CK~qbD0%vXUL`zKQMj z1s+=YoD!1bCmf-0>PFOGPhyI>x|5r0u!@K50l3@J>GuJ~V%w}E`il;z^6wE9OrUo2 zb^oG+=KENT(p{3qXE&J*<_If$fF;sOjWt=N`Z$UPUvcVTd zC=^1K_ASMbayNITHE)otFa9+_@kLu;QRjP7X%w?-b4?ygqVsk$eEAc)gW6Toney{j zl5sh2nHKmpT#=HIN6rx%iWKbsBk9ZInmE6&?bo)rA$36%khE^72vGq+giPH)M2L!j zvZad3CZx!|BvY$^B9ICdlr6QWY(hjSPwRpT5*>G*2^H0zMzTz;fNV z#LuiMJSwgz-|sYVG3*was=Hjph>y}DRr!z)^C19E8iF6hj?yeyalpPL4m}}bsaApg z6W1e6G8kVmxK$qeJA|2G*N7%kRbs?|cV0LBYqEPBCeZV`M_hii zMu)YCdgZ|BGuNWK(k++KdOrNAH0xEn2v$>|^uuKGl~Yg$i6OW0Ki+$+gZWIF3=tNf zLs2&>V(97gbp5xB%rW?6;^Np5F8k||011gik@Mx?)u90dZ3b7F911w4&y)5z6uoEr 
zQs_vn7j0*JJ(PZmJ#{q1ktyfaN5MFQQ$Cl@)a}us|Ak|#0?YdY2zNn!{|qx!z?>og?o>vFfT9=V^5P*)s0R`BC5Z8iEPPjN)xz=}JQF2baCerq zGQiXE68~%tt87(M(0{!PwyI(#cp100oUkq^Wucj%{$a^kn9qr=)tK>Cz(TOVCIj9} z;+%jS=MFFDeSBSk5q#30Uz%w&S2^yd#&Bj-+)@}4n}Qh?8w z!lZeTz6McMDq|t5;1h2~9%^dxmq0@JI+J))F##*Y7aP$ke|b)pjnjU1>pxf<{|d-F z3ZxX0V2oeNIY;7W3)mq$2$6p}j?N`~a`}5kj^v@xBy)mJX;7KfAJ^6q%4j~Dj z9z?3A|F|~=zm$!CuQ%E+<-2}sSvdMyc$f_F`V`d5m*}ZyH+H}A4A8$?c@Fh&$6l;# z1)k!r;G%r|x{RwUQueCS$vo*1itrv~V2=_FXd2sA87e4^; zR_3QcTsjAzhyl7tCeU(M#e#%*jadNX=nLy{8r>$Tj&lIdYsmImyO&lW$3`wNW%~)p z*60>^O$l;Ilq5UBG4gu|jz^ zBK7P$3wd{pbttqUMn_crwkMQia9N!E>eWn+CJNzp#n_7m`9NjuSq2P#IW~_g+Gqk0 z&dhGfIna$bawwYq`Y2?I*NGTvq7IUO7Vmps;Q6=>SoS>k2IfH_bomdlvKleJ89gkj z%pR{h?fKrvGzm|T4Tw))33sXy)0-&?;bvs47@rX16n^X88-j18Ru~!6^=3_*jumwN zhC`NZqv{EK8|XEA^^W&oL-!L+*s(c3Xp`8FjSOE!jFN1w*>PXU@b!FTxW*9&JY zp2}}}7&k_N`*jfbrn_ESJZhP))ijW{`rwxt3t4S&vevp>y0WS_=;|AX12aH@64*2!Lo$$~pEKa4x#vi9Zd2;{Po(_b{%d8v*{P{JB_$zOG9Qd_SC zh5sk$EfL(AErocRY~1AbUppo%I$!iZ(OtashtbDr@^UF{5WgPTBnod79NVd5-+AYb z;7&lIvovk(#K|qkY;V}M&jh}tX6jQNr^%bX-Lb5E508|??<*+BsWgN{!aLL{T$aK& z$aESaNfwSBXhn{f2^dSgG16kqRy!>L!}I1F^f|3{Fo?iQ9uCNGF0$FDpl$rErWV*$ zWX#WrSA5f*eEr|-bBQG^X(kW^HXM91G?qp$w=a2aR0=9a)9(?z^TRK{`Gkk(HdHAx zDz{xNFLqs6veXZb*;@O^3oe`y6kd0<(axP}BH!@?$88h3J>p*=uooMaydP(8kr6H) z)2@u==$uH>YH~Gt;Twvk`-jZ-YD-fi^8UQ?r|tEe!%z(-HoVIC^uCAE-eAKf93HHz zRq2J&Rk&!fuFF(9vJET!Rm7&?Rs&ISc~V;XkLhA>DGAmSy2c`F zTY4;Rt&G7TJ~Y$b_D}tu9uqpoM>~g-AK}vSyN3?6JtDY8+iK)>>9|LX|27!pi&~0C zYrJ3jEAS6Zn7}D2^a}v9X)F&MT_eRt55{IC74W$0)-pe2G%+%2Ik!m|o^Hy00z5H$VDyA(cYZ41HiP#T} zrB{A`&t9L2#&b#jD>yC1?-?P$40i_dH%DJA7KJ?( z0V`eW$N94#UO$y#!4%oQB%2t-S({taxAe#ebm)&ZYkhTUjJ_4vRDjkO8kLmU9_~LTI@0xt2FR6`HQF3k=p9-@$o8kt!jUg-~ddR|Zm`lX4X0g5HhM zR~g01uyK;J5TzFjiFR^}3;zNaNijV7jL^L+sg`B7FK)VuO{%|b1Mtt1k{tR^9t^gd zoxl4E_f^sRhD-_#FOg4RI7U?p2wLE420DJqa^U8~?#L8@Tkm0p`v8MwfIXDb&-P+pynkwgM8 z6ol`i5*sz2uh4S){3Be9n3IcBM`q+yz{XB`+CQuj>(FSa(qyZ+bNoeNme?V~-RS%| zOJ+X$URRk#I+(LK0zimmEm^NFsL#@CNqBP4Z;Sn-&D4VbTgC49 
zPKaVYFTNfuG&_UV8KdTy2`7HRM6z+IMMKmwk6mC1=oG*A1gzPwJ11W=ssF>Re8yLC zZ+?H_Cm{z`}=Susy1cb|7z zIJchQeO9MaG7VF|M=_2RS7KUbkiv@%;aP|;ZR;?iJOMvp5XG-cLx+_U;1a!u#+Oun zzh*zt`Z^5Oq#26FWHzvw!no#ymIo~0=0^!Mo}gdpZAuNjJIjVA1v8)nN9!x%O`BV9 z4~koLVF_~qftg)}N5Hc6m-8pGz1;Ua4?&Kr94vx?4$uU@yR~#e#glZ{yqy7*L*cQZ z2}1VEBO>LAaKSy+Khh;m$syz5NjccMSH-|fD)r6aWbNN(=B&dW#LFXu_XX5Oy8Gg^ zVN9Aw%DH2UJHUu1xvmCPn~ut&{?Ze)%XUxBFy<0}C~=Yhk-JHre#8XclR{Riyk&6C zaK>L2KeAr@bxy^=i)U2PD!;4@IEDV642Y!r(VSlHpHGpNOT2Qjd!wkmR#5%f1P41$y3zV*9GUCh*pt`W;FVRw#3ckPR~u< zB?0xUSIEK6m`h@cQ}Pw9V-oiW$PQ(Q9MU6aL$R6>sW#ofP#sfgE>HID*mJ<-PCqzn zD9q#&$mA~;hsf$GDtZ{X`u-fZr+#0=7xs8xJhfG6)r3=%x%f)kZ}$O83Y(B2?}j+n zK=aMcGd-GkYm`kv%nIr;GHPG&XWU_bd;zFt7WYSOL;Vf=q)u;A%#~+)kx@?%&nWnx zfyA&mgE{5r>oN_{_%(OOoK|nI&*ai68MvhJBrw=DA~Op~xzYzRhae-j8+$MRPRt{NR3t zRROqDZCtNNZ@_@Q&32?$kN`LMgSFVSlENYWK`*5f{ESgT2@2<=l?0*cb; zzPE@rE&wB`JdDgKXKk1o%wJfd5x(q6PCpY$cc`-tD<-~7dWs>l6+V$GqTZbbg_o)_yffQ(oL^?$_P5QQ zS36Ks^@2acq_hwd|1I>0C?zuDK-?B zth-U|8Rjnhh_{=?PF3{8KFH^9Aw>ZJ>9~w#_XfMRj3&fo2M^^f2rsH?{O_oEcPPX@ z5prbkZ?6MkbTA&!nD%Ar0WCWA91SCaizrAA*u^%zR)zf~m)j?vHC9L{UviAhz}IQi z?M~8HZdYlrzIE*{Db5~hnS=4*S=S8ySvAKS+G$|=a-rG^!u^f|-F!}+Z<}S{i+f`# zs9@__+w)5H*Y2u{L0kys>-3rI--8*8MSAWrMEuK?C~ulv*!xg+`8{t355?iA6zC6W z!SmEqOkReCgh``Kvcx6#Gme5j7k>8wKH~Tl=nL<{JT_JI5%bUV53UR8O6rUJuTN-9 z?Z}jc2wom|$-h8;xTL(X_888MuLAm%`u+R6-EQ877CMfDkO8<;#?*ViT}AQ-a)i?& z1oJ(7eI%IUF{?w+j0x{CG8I_ZJtmY5t3y{S$~qAP{~2%6IDPf>Adad6g~D;)3#K`( zIrD@LSh@A^t^cX(uhzndM%OOO9eI(#I?>efh}0`(og$d`1uPc_OywrCy=bGt!TI^2 z20e_-P4xK8ra|BOb)rD3Nv~8)qT-E5T?YmR?t3Q_>2K#2Nf1Sz5c;`z{};<)5EZWY zp13!p4$q`&`E_jLOirlgQUH|r3dRwvC85fHqRx*KU~@UmWV78=>xB3)c)Tb~cbtnH#1gI&mXbF*S-_$H<5NX(X}c9 zpiAi29#cbuahLn$Z(VG=f5(Ap_a`o9O((Y2m&l?2lxYPu>aFf1kC5{Hj=B!|4iX0p z;fkfC%L{IFSvm-fkL!?Jw1@-5+UDAD+QfgWqy?&%rqB=;{PSPBzq)~8eQLL^m#$>6#I;e6{fxmzP^%NL#od-9$=uP%_$L^}aRI8=n0$ad10J17l&OfyhgUT=Pz z?<9?xT|F}gZTN4M!~XrRyT%_4DC>{^wTMRWkeq`}mrs!7d-Uq)OgZw)0y!WPAXF&c zGb&`N$ zD>0l~%Cy2gz5n5Ybqf53o5vbLlur3QAW&6*JChEaDBSKTzNwv^+KowVTU<~sBWsye 
zuE}R@wY>6HR<6^$tssrIguD%pvZfsBR|Y9Z=I3r+!<`wyRLP|X?AlctA83Yf97so)m_AnWQ?slTB!{{Hn$^A;DueF& ze&a_Ma3uiLF|-H+FIR3Jd2y?cb)I+oBr064rvn=(i@r|wJE7o*d@^{-x2{;@WZHCF z27pvdZFNQ?jly2PSVJrGgMO;tGU`Dp6L{(j0xReGRWf5y>M5B} z>@_BAq-)KT`g^VyVJJ(OWQ#aT5N0q$X?3D8*QTfTIw%KV-;0s2K3`JNu6k_A3r%5P(e%uQ98 zMR@!(>l$-2FeE&8m#+M=k>t1~V<@~*AB5*{=(Aee$lr0RHcPtN%%hw_U&UW5F&&{v zvuu#PlUoNr*kAXt?8>Xn&e2y(a0b76Gse=N(e9i5z1P>&*7zYDN~h%I7k<#o0av^N z+?R)as_^;o58PA~^V&G0zdm*{$NVB7(ydkGW=_V!11o0tN8e*F(KM%ZmA!FU<22hV(03Dn6vISp|7ISIQW+Pekw zoy?J=b{{z{QnqMlrpMy;$xb&@H^jI_-L$D-gtX@xFqWveTai z4FQlbHK@6Y+wKY2-S7{@`0^p)HiEC#V-EW#Ki8b6uOi$&=3eWm0I3oCv^~=c$wTM} zM&yFuG)?~=`0bjmycuSfy&HQooX~rKEf~_bqd18i{nA|$E6KY>@Z2BRAG2w)@P@(A zr;~m0wT{Zu5f6PcS_)pP+cZ$ugizL#`HTmz1nNy*UXRgKj9NV2=MMfA3i`X4SixZve)_8hmEO#270)jljLG2 zDtUZ;-2P|ZjANNEXS`mio|vp4k6|}i8K*My)$FKxjmHn=!Uy%cx_;{}&kj`A;|g+g z=wc})>THrU=@na*dWnUsIoD+xX`PdEz~l1w+srT>F(<|~54D}>I6A|josTmB@rm%2 z#}}JxjrLAPxcBWyfam(%wT|hmPl&i+)|&(3Kn?ijE}@Um7fee+@gL(QdL91PHO-Q~ z$Y9M3>&u>P>mRkh8{X*q_{-h7z4c|u4D%a#2W4-(dk$Z7*vu*vZ_nVX+bwFKYurR^ z?jGtps2Q&WE(E}fQ zj+Ob&6HWDdMS=-DAj}!D73ZGNMVF`pTA!=LHUbw>mGznG$^=rh^l#C=cz-Faa%k0~ zaOD{p!KN$_Yf^0K3OCG&(6sk}t(*fZzw`l&*id}D8x%_Ns+cQf;vdsrUmLZm4fY*$ zs;30#hE;wb)o12jb5$j`*_C(BN%E!@dYM28D9Murl-FVHAn|^*_2;QNvu1QVY1b6n zwTs>9y z0Z_)R+f=7_4V>jmjn`BB`oa|SUR==<;J1dUPRjrd0#OpNz*G5qyP1=JOv|Wa)GaNJw1@w-%oBIJ&J!w zR}YCVS7qaV@rQ1gb_MKSulAAtaInRQ*{Jj6i4-5NXW|tOiwcpLLFZ77SnZD%^_k;w zJ-AIZ*XZHEo|Qy58c9CR-N7e(mQUGH#)sRqp58X8i!;W?yaW9H9S01)kn=lxCD)W- zSDf`bTWEgA8l$eFH6V6>i{ot4#T#v1hQ=g6gaRHO*f;mTp@CoZ*XlP01Oae_1XiXi zdGyXMAa%a<3i2dN{-m{UGu#GhrrXkkAXkv39 z>_=5(k#u?UVanHYNG*o209qWy5_iS?L$NlxkM7yzNB;uhU1Z+4q!(H zctI_erCBJQIaGW;rhRC6o|~Mjzc{UKV{AmhEC&%7&J=Oj3q0c`}}SQ+lwUl6;OFSZ8J2I;KFOSo{CuG}j2XEc@Y@Xon5A zoho6DGq+y&_Ad3^C@wD#@RwHSW}2@z-r7}lSV4|Q(2Bgtm**XC&p-Jt!pnsB**`nvZMwxImUhR8#U7$+oQ1?NPy?J{}8%p z1$KgUd)613IFF(P+Pt_A<6r>>ip2+%netI~{Dk*qYWXhKfTq{z6O>P*y+`N}uczVB zF4boVnzso6HQexqkd`A~el5^z5Gp^RHnS%rvuc3}9Wz^FggXF+f|y{m3#D0fe{q)F 
z`Bt?n@1Q7$`T~7$W%H?odhcscADrcdnONli<8?IBE5;YA$in0^|Ir(b$H|K<5Bj1p5xfXSL%k?dB0K2j-@TAh^i>v;WBI8`;)Wa3zlr|h9;SI9X5)vS0t$wgtCtd3B^=R#DuyYr->Ypt@jas z9$hVRpybNpI8gb}uswJECsnC<{gs*-6ujuyx;(P9?L%Xj<6`z)%~-SsCMHrAoX6kR zIU3>pY1x(0qOrpUZ1G4X5%9PN4cU_xMX?l z_ZBRRiC3PAP~l8tqGavj%NY6hvNA?SbJhI-E=90S3acY()bC{bdcS(1me9E7w>!wm zKGaJ){xsnQX7%k_RR)NwV<%&FUatT@)mwbZKgr}jRPN?ySgth0H4IE7^mg>J@fDJzQ0JV0%wS*^ zCyeDGGs)YI6dbImqb&i1E~FLj+};{(5#es8YKO;Nlr+>;b%l4cY~f7iP&g4gO{TZy zO&iF`*O||P<)C^(6H_&|hF#ZzTn_YrL#;DI0uPYz#hAI8=WwLFn3+E&VdWe>v8?ic z2P%elj!WFgcz_bA5V9rAZ_!a+8QpH(3sU`b3ZId;^m$@q&;97SE8j=1Cq)M@LHLpg z-CoRKNx0?RJx^8u>j|$au#e26I{aCdNSt_bc`{F&UFMMsVQLe_6i2pL+U%4vuJZt9 z(FhFC8Wkt-xGt%}CsQDhESXLNZJK+K>$k!HN>rGFOLrG57KN96cMw|tZQSuen0x_r z)GuLbzEI|D#mlaS-=uD$IMh?)tw=2fFOyy{o+|63ZHMybppAHUC%eZXsp;pGJRtK0 zn$FQb6Ahf7r?;^&$bxE^zPJcA=8O(IwF++Dl5d~qf2F3cw1|08-{MAgFR+DX;CTXU zl0YtT&fx|&IqDceJrsv%lFt!fL-qXAi0g0>u5k*h3gMykD%m1m)@|TFFnW&8&qKg> z%o#bdG~@AfDA0Lek_(ERL@4jo?bTp(`)nDNu)Zp>!V+Uy7=xzyLiZd~f|SY$r#F&Rk*dgoeX*KkfV<(1=g?X> z-Y>r`R}OsxN;=E6yHkG-j2N(_H`4IkXo%n;J6J`)!>oJlS17@i?dWe@ifnlSi2I{M zip>o>@8R4_f!D!>5FXt@{kE^b671)GxAqKyj;Z4HA&_Fp*P*xVpTA&;1$|yBd{cmF zr6WqPSGG9TlivegKF^;nZU?Ie{YvFUAE%71w|)bIDoF0lc+p`)2!vElmSw6=g>&MQ z`^%WGs?lJ5Js>^@WWZ&mF*u6gEUPvJrMnRAe;vu3@OaeZMb~5VBnp=E83t2XItke3 z0g!Z8lxiI>vzU-SL4kQ_RL70SQ~P@UTXl7x3U18veEKao4f^asda6nLTh%BV&HY9uSxA0;DR&+mW%Lr> zn*BL0yQjPvv$d-v$9xVk!zSW($4zw?3z}15)t$YWU zmJx!<-JalHJ}Bd)KQ`SzdK|D+friBa+*E;{1JxUi_Xd(2fOoC|dh!)H4E-AQ->PJ? 
zHbRoQiBF6(W0!zMVeUBNA-$>suRas@FIQ>zQ31+E91`%#Un`v|#&*ty8fskag(mib z)!55Syf!Yi&|gy#3XHk2al(ouJ6Nzfg^a9x|`jMXR}CSjM_wu#U0brkhPDs z2ao4Hz8S}M>OvsS=MZ`se-s1!fPoM2!A@ZXRt@ad&=f?FtrY)o1Sf%QED>VONnhN^UrV4be5>O>;N)K8-rRhH$J-^EYNdm&$q zErT%!Yd@6N?If17*YR-%;#Jzeqb!#CGpkbq^q640tTdskqhatww2paz;LVCLZ>L~R zFb8^4fg#Lis5Ub32#K!%h4fE8xeL+|;N*~f0oekz<=u4qC>PT zYv>#3?P1pmLT!QT@AK%Sitw#41>FkK_(;2X|67k4zSeUYmpaMR8@`==C>kZ@cmr+) ze4$H>*ETeo=hF@F!HJks2^vM|6YcarWon4rB}>?QLKjgOEhI1ZvlVg@&(doU%uIkr3$RBpGvs+~y(8#0rb#=g9-%sjcGw`?dEkVK!|6(Kwc;`cS|b418s_3Ac? zC1$dl?0)gm)3vK(K(&kPPhA9%6+oqR8TNCJ%box%3GN2hioTIPkvRA~rc%BIb@N`P z5{P0($wAYBWizA>_g)I#7Pz?k79=uQBdf8%$Y=Uq2n3GybRrwjI3p^XI{|N;L+wKn$Hejfei*lG)WnGK=dseIP^GZ0!xP9|7ja-*- z^!YK9M<{LR2~nm8=R`$mzpD|KMXGp%&?E&-^IBS-N$@9i)^QDfJ`VO>3m6Ma%;Kx| zNmbrybx^Mt8^c1DRu%0@rmM)%N#^PtB&2X}$xZoIFCLeT+y~Yh8C@wo6M>FHZA^j} z8tk0bnr7*(x{XgC4x6?JfmBheayMX?M-)*?Ju4D)A*(00I^!4~7{`ncNl+WfSO$LY z%h6wVT7LD`m(CI9o=yW^X!<(91$T((}d6r+myre;GlNt&!5DU|?eJDUA#u|a53 z`hL`%>cCBI)Tek+J)eK_eRY)^awHI9^EIj#rMTM5`-<+SUSn*~GBZ=OOFq- zF));{Nk(;fUtazVIr7iIcVNJpL~IKGqu=~(>$9{aiX?V@rt6E1I#~81`X4AGl7nAt`>IM- zUKY2M-gd}=CJwxsmg8$s+S2=;TLNUMQz%VH+VgIqXG5S=3oX!qoPkmbG}LTtN>$E)J(p^qO7X`q8t zSLF^}710oPowhBcmQX`SJ4N48-bOKFjV!t$@)(}#1t{L=A07Y#_d@(H;LkZ{EzQTx zBqC^vZ_o$s)*AVUufAGIxLyfqzr}&avHYvFLZE@6O$PovR)0vmI>9)rPrpGKL*1DG z{)={Fq0FvfCHUzWFbQnF{ySZ8?KMcxamOk_HuM)UH291N+!p6R{zQqE*i^97XixOR zn-3!C^-3EBlp3JW)4=7#)#Z+N{Fxpu9{$&6m)m`lyJvpKG5Kr(^|jST|7EWR3^3}w zE*c#@p|OG7Mp(OK_wW-4qq#*(uq5W64(gk`e^nL>9C~N9vf-&c2Gzs?nlxJMK6)zV zCmB3e^LboCuf*x}kHkPdhtOu0Rt992Mp)A5F0XSu&(!1MWzRS6m{gP>)m6l5g9$ij z`q#p)fF2_V@P%Lq(>d9#I3_kd|92_G*E{W5p&O>VS(0>a1ko}V-G?tI}iwVsOpZv#Yv7UOpmZXxp3F`EZ zjSbt_ciwe8ZOfz#F^n=7F1tG)fWNx*))&47@n;E&K51Uh?{p_{7MelwOZr!Ym9PLp znyBOEE;_Nz7mMyStNcwHu4~scfbjIK;;Yue2Um%6wBp;3w+Umf5~QzlG_N9SK8WZK zYlAp#6y1oaFH-hp@$Fm^S8#dfjp38|vUm@%b8aw49q;miEm7YF#t)9~1CIKu@|-ko zhe6Lak%cnvg8IJ?0XU454@?63#S+)X23v#~3szArQv37EZuAW9#h%dhMUR#|PDy8H 
zISW!QN+x2>`7J>mKbp7S@%iv1zBPo@30_P0FnQllE57@^t}lgTAv#q`=mSq>?8&CK zEjeF`ZRul#*=mA3)}?tV%w^z^^W$5w?-a$9UgkZZnhT#n_w!?ro&_LX!f(-`_W!4IZAMI1x z64NgU%f&7-L5V}p-f_{NF z|00Sj0M5`rrpH}Vk?l1`Z%Vq{H>ZeCw8VOcg2GGrQI<~u!&BsjNNwm4cx7+9I#Z3? z-$ftSlf~xdcbOVq^o9GVaZ3V-k+C)C1S6u$hN%232H%me$ZN5(2;~!W5GRrTC&FNE zBS91P#_Hxh((ALWuIT%xmfDz+9*10dJ?yIuNNcGdDH2!!smY_yP!fH& za*Jp+x^^{)xUN6i!;)r;N2wr85`KU<=KE|rMqM)xDikMPpq@9q3{QI=l|@Itfqf(3 zF=7LKbH6GPl&F&&@VsNTLKPc<^8+|V!Q+h_5?=bhMszKhOnpDt9b>B1sO}=Q7U?}2 z7lQTi@gx`SkMSijXP|Q7pCwqMfJ1YrzKp>2q;aU5BmMwGnyK9ITbx6Fxb5AwM>xT^ z@*`|o^tO6vUG6b?fs{RLl^nwM)|bUfHS^(jDi=+(EkV(%1>84*PJ%8V$j-z5*Mk$Q zcH-B&sUlM5{6 z`d*wdrnT+{o(!O808q=4;@4Zj0O#*X3yJ=kRCevCneZ5?2lUqf@_I1kfOvB)FK$z{ zgCLI)$gz)BwFitbA0^TzI)l=V&BLAmHJ>@ZqF}+wF^sAO3Y+?*I*7MCOi+^vom52n&d7rG07UQeg1bgiECIJ znO*(S94Hva430^bbG*~2d>m6q$+L{&Rnbf%Gi-4jTi`;7W4;G@ppk$Y=Lr1eb|o==r9T*F?{YXT^{05=`M zmnFxG+iy3Tc1J@!%OG)syi?APe*>=P~Q@R7I<*Su|UZt%a+ANFZu;<31T}Im-<1zc?x2Yqa!;ZHs&Pu zcK)}@2e~MIN^fAVZ{4$7uq6fzBEbtKz6o2p1lE(&o9&|;$rvRJqLw$+M}DMzC(Z?g zrZ_#;5}ViW?l+iIQ|P<=*Gv?tr>Cyv)f<_F@J=*J@xKX8b|{q^81dniRESr;;rE?C zP1TRdUi{M^b5IsNM*mYgw7pi0fGDy1=tkJbO;tGTk2}0W9N)s;;pv5$83cXvW`FkJ zV4Q`I0z6pH0)9h3L27~IqvWnAe4>#f-b24(bPo$y5}Vrbi%vw=UVr0bMimu==rBtG z3d)InExXJ_x2U^aVt{}$PfEToG&royEjVCJwSmY!XOVIcPEy+m*uooY982zRO0@s9*9Fu}!T9NLlqP_L40&R_H`pe#J9IX6$A0PtzG%g|Vop(7paQRZ~p4FEd>g`57fYUnJ-gLFe{=J^du*Ti67T zA};#YB9OdqkRNBfjvgb&(_RRi_uLN&s-;NatYFst{}W)klmpfB*pB0YHEN(bTGBNn zvhJf7W*G+*N{y%tugUsuFE+AZrPfy;@#gtH>l-PNnO;nQc*u3~W}8v^vC5&>=sg5F zffV1ct_!tS9_OJ~K^3llUwGFOCzyUFf%0Z8LOS4e89!-seUp6z)`XL9cKQj1s(c2GRKe@=lx(vVzepji4^bT% zE!pE^{Wa**&UpT}MJB5%GNT;|Tfku3=Cg}cKnAHgXb(M_BxcWVlKasuFf7P{i#XsN zE*P$}UAQCJK!S~eJ}&a*;)JZ`g_PuIDp8`c2zza8c}D|{i{kFChx4{`>wc&%aEq2} zULGz&C`^^f>Z%#42s!wb9M&U;PG|90eAM*X%V9PIz7>h5BxzUytu+n|c+__L zvLc5*W{!*d^Z1LM^+a_2?&}h7;Eu~<(Pa};uM-72ji`in9EWGU1&Rm$r=qn=CVes| zM@4?dJgGXk?THo-P$(|ohDe6=SysnY5%ZcWbT&^^Fdzf;J`XXLUGemy>HHy8(qC)v zoN}v&J71E$I*LU*F%*%rohz&Jx5*_;Jv!%=T9DIWAl$6U)8z*sSlJF0Hwp 
zI>8kQ{#-R}Ea+NPCcR{f-uIvVsDa3C7uX8Wy=23|^FCd~@Us|eBU1j40X0PLTD zXiKx?rz0}8Pq<$|RwGtjw))eI&*;zMTfHyTi>F?olvmRhxF@eX{q+ut)9MD94fSik z%?T|}t^^c7Q<#@rbf2q7@VB8yzy5xP%7RokAP0Y;#(-I)g8W#-npi1$1hag*h?3^E4nv%y|?^1mI*rWId* zP!v7WRn(VoQ;`iA!FU|QuaU)H?xvX#*H3rNy7p2-3@hxcjC%R*-62Ewzz|+9qd6xz z^p{+A6wm^~w}32*^w^eiUZi?cZ&3Tz;wkmd#pt9{eRi8kRAqD%<`x)VpB1za4=qYm z8Nho3dmP}KfS67$VMmOecw>BE2K#ZSy~g+m^)XjjH5~ax+o2X)gt|dF>993p#YUQPLv@6l>f7qF>kJO(iU`T*O*L_-BvB&8a_QseX zjy*I-J}h>L7n%xgFq`P2w{Muz^nTPlVt2E%E8{+D<_m_0CV=9Z2m5SgHa&oPY|8K~ zaQ9Agh7`9U#~BtRAnyz&+(o*Y(g1K35;G|?5aw=QGG_Dv9x^_`in=2YENx6d) z3bQmFZ2w35k{LaL5>R^M?7@*=#mJ+n0RH5P@d#94a$4irOXzvc4(U$d;Gy{Dgrj4- zza^hpzkDni;JCD%Vo;l)Z0jy)_}7~t{jeUFZJvKSsn;k)@)64 zHTnyRWb*rug?Bnef4e|6en`TDcj~%zV37HjJX;5h)nP2FOY~^-(dDMH&@n`XQ zJjP6KVR<0EmJf1m2>(~?-8bm=!eB>ymA>J1)K5ia71=ih+E-TH`ACqqMnBjNDi zO#7o9AZ2-p~yF-K6W+uj61_N7BC*sbgPUH&3t_ISmUpqYN3TS>GU=DQu4g6V8 zNtsg-xbY7c=`shz5kOE__G>vTd-}~vd!EW6V^j* ziSEs{dnl+I9J@^GlklddsC3|smBguId!Vprh_l%Oi8Ez@`&tUqXU0{%CP z6umeR?UFT{(b&ExtrtERJY|bPV(ftco4&+N&L%%^*B*WO1cYAvx&Rj)iIVFtT2iJ- z{?9!GRq!(n#WYE!z=+#**Of3^^$prJk0#Hk+vQ z?K4}G4GIhc<^x}*=(>~sl`KNjyUL_lbiJJ0PbmHIBl2T3i8)~qGj307f$ryEWoQ5A zR#Hmy0pNGd*}1>gXJ#94pm@W;CCc}+VKLqU_RxpJ-_~HFFZ9qrp?H*RfJk9q{9_kL zlWYahg2fM$&B>`_1twrfM%S1>R8rrm$|R0Kl`wHO4VLg!z*d>Lc~c%&X>diUcO31a zEe2{QxXFq>y{nmzW43r=dW>r8snZ$Rc?2YjCb=o=8LJ`2!{urML($;i;0ZUvcVrf! 
z93N*GA`=G?Z{-ajfnr|2V8w{m2WVZPQ99x(ukxgF7}F{4%p5U@N_P0U$#w z&~vS)-hSrp%Hm%rrmMcq_HJQF^M6sAfN|H1vs95rja;D= zoxlq>-4gzMT6r208-QHEJN_@LyO)O8n`B;%$6QJA3CyI5B+NOH^hK~?@8goj;5UB$ zHw|lTZ{2pmD~a)YcBA`N+p!$}t)vgK4|lJHx^i^IO%a)DOP1aU<*hkzi&11;4VE`JS>a6 zZLqbM>e|4eB{Zn~HNW{pmG>5x>YZJtpkymLk10kgmf{-;+8$OtS^tlvFON$qZQpO+ znw+ML%F4`zDJv^WGd0Zxrku$fQz|QSNv6!)QZh49fip9eW~PieWv=9?p}CN`W+>%G zW=gq{D+($q8X$^??YzII_xJsy&&U2i59d7RUatGPuWKu3SM6DHw&DV~M^YQ5Ig5^Y zykP#oL9YyUzR*}%^SY+*;QZI}9Y70~F{g^_04ySh2v~^uJJ@cj#NfF8kRUX;2wiEY z>y$Dv9J=E7vci!s;MB!e_=_&n4Q*TvYh-WabBsg936J-lQtR7(nCVTg$p`c|JV@Y0 z`SL7qFWDtI6alg!N9X(_G^!o_m~XzpYXo|K8#JP9-aWM`5qAVDLjji-O)==_lI@?>ee%2q*)@S$bVY6Q(s>4|xa@Ce z0YJ~_7lCMVXSYbv0XqwJ#m{!XoBMi;Q^}rCc;92lAthFGbYMU#H;M$@qS4_iEH8|q z`gbPPrJ}ZmZy$#T?{5!VfJAE2Vb8o4z4NF1wAMPjj{A`(_I$$ za*TR&Ey?nEe`hu(p$xKwTXCrL{??{K1M?c>V-x>LZL5R3DcDN~kSah@zxbN>MFYG; zSzb@;%U#vq>i^2uTpWS-$ouxznrr@3%J|<-zyLI>3ltbK$)Vp6)~AdzG>4!z_*GJi zY{0|&o)k*AevHY~ojHs&_O2^}%dvul5-n7hTEVNZ6&-u(HQw@VCpXhYGh>-MdxE}B%t z5sBFXc=}#4E9D@a!NB23^3usO4h1`%O$g?I8Vq6EsszImtQp-Mmt$#nf^=?#aFHH%;^4*;MA10OX;(lFl^#uS6sWYyQk0X|L@AvI|`=x=zFTAzH0sX!?ffR+dzF=f-X2z+lx)-DcR+$x|3uO5ng3cAZsq9D zXV0&RDkYgp+*y2A4CRZ1jwH?*zE?y%1$`_7d!5xJOTiCfhZsR3s|t)179rF$C~X1K z7`o}=2!1$7NzP3VjCn?#18Ll3g6-Zi@;UhM%CP4%@_KT`--82#K3}{Ql^Y|pfkg9{IT;X7<6W+F zOmM95dNs1EQ`SyZ%o{;$Bl?Wj!X<3u5y<^c*|{fzkQ&Yb$^@t{M9W6Qk&9*d z9faHc3$_CNn3|y4N4!1i0~s3bsR#uH^Li|*$b0j(Ht9AnccBveo-F~=-$itoLgDrl z``j)zZdrIkUW|MJ5B-1=0&)r!Xzqi58%7DUa&wn(8c!R1?nxkWF`L>Ub%^Fz|2U|C z>`YF5TYvKC&5d-svz(lu_*sNNniJLe@h9|~WnZ=2*$f4{XT@f3=ko)T?O<+DK#|ed z6@no}50ee_2Wjc(i71iht}2q)S)6NY2<)~VE-ho~iYD%STai)(&e|`3C34&;IYBG; zoc#PiJTpY?rI3;A$n$QQmKFmlMl*u3S1t>N7~xU7yt4Mvm@*W-?(6Ef^}?QQR!-}C znt;eB5u1z2j?OrNxP9l`eoxPdc?I`&8<7n)Giu#UPhTwfoKF1!VqmYrAGx?BSahU3 zK0ezwP)i5u!4Mv4z*naParNaO?>Y#3r2hhbZNLeeg_9j=xRbYEyElN<2J!oqqs5*2 zF9byDGDR(C|9%8KqMfw`hYvN&Ob8L|ZOdb~*rIcTyvdH-QJ1tu!NSb2yz_sQ`W;ep z+QhY?^aHJ$h^g4P1v>;w4aq$D`OSP<2Y9Hr*zW3E^B&eE;!EOMdd+@>wtIXBy1f(Ki8tuwR=)Dst; 
zfmA~GvzQsJ-w3E(LLMXh4qd>e?0e+Rq`gz3*|s8$<=2~=BR8~dJ*89I(LS$4E@6NC zlGxkEWcR2SJ6E?q1|@+T7VR`olL|8*hmEWrwOz7WAx|BkxEND&r7{J|Jz-}4?C#1p z{-m2whmpl6W%SaFCS>vUr+JwnAgGzt3fV%oY)jsjdK;{;))>4dBCcyva!qY2`+w2< zd)p^L+z#A7%W6~vdto+oQCEIGKFoitF)#2s02}Ki*jCiGklG8tn&Mx+KD5mn4w21Y ziUd6YG>CS^G;rQMI&h|3lQHdF3(5K$MDpu!BapykMvwz{Pu#{B!KsB%lSP@YFZl`2 zwI?bCdKo3+D8OY<-*8ahCdc1qF0}xEl4%u+op}iyB~otZP?7ErKStn+D2^OwJ` ziu9V1`E)`CHwK4>n)3%l{ju!9YDl5(T*`S0nvOdlYzBmr01RpRDDeGSAY`qVP!e}|9zK0bw zZ3JWn&>UFJ9MF3(yh>xvrftRS z3Xa9z0q@FYqNg(x+(>&gd~A`7#!0V%iw^;)XK_CI8?cCUYq`!b&BLJ3(7fqo2j7!c zsthoRp5(cGE=BsS^nEPBQ;Z&bEUzd9;7t$>&V&N|K&>r65Bdi95h=n8#3?Qlrzz8B ze%X#m4+R>j;O%l}T?c=Z!BKrb19{KR6Y?aQ2JZc+bE(YNW#jRqqv$HX5byZwbES)G z?J?=;Jn1AI;0H6T!r%ljRV!^O$nR)JF&9{aJLQvMqzKjYR5e&NZ^o~k@h-UOg3vDl zLFO=h)#^`xrs3X2jVaQ$r_ME)b*Q`>(V+!d6j+c3fG8FXfWljp0n#Knxw_9mD1}z(+wGk07AcXg>#GvBn`$PeJ{-$w_a;1d_rwns=1@WHpiBK z12YQ6_pSXkC0*(G?9n3RY^Xr6Kb-m^#!>$%0u+5P#Lt14!vh|9opv46bZQn>2&sS;VcNsZRH z7&eMf2KoPgj=JG;x~kS8HZ~BAJ4MN8y{HPPN@gl<+CXL)@Zy2PSeE&sf1mM{9g|QG z*3#HOz`)XaW-F%ETE`Z^^dI8bE*+)s{;=H5-Q&k$EG;KmJt@QpHAjFGnn(c%1~5Z- z+;d`qyL?pb?C$JvHL%P%%HPTqVRYEC#aICdVhHqPO)XXH&7ap z;Xpl^_iOa7n}JjzMIFk>D6y8@VOP%L`BiZOa>Z3$8-Z_XbHt%bj|wiomZBfITyBQ_JU_1H zD2)IR+Zo52V@sR?NheL}&K_gU$^7%+gjUoKENNhpwSLJPY67H1LZ`|(_gT7XmwauU z3jK?4ZaePIq8Kh##}p@_*1*``x(civEmi1mt36{WyqBKu2nmCikbIRTY%cJ8SZ-X? 
zK;zTrE$Fl)nvS(71@z{cWFmGPPr`g!G8M?BHI)^;C8+C6mQKll0zpe~0MoeweeoZwox3C%RlUQn4XbwZ_mlce4>G{bHQHNU zjow7Dg!4~S|A(XcePAUW_I!)zgM|VKgMxwy$Uf-188W!AJ1^k;b1Lr`LCW6oUlztw zi~##4J(8+9&?ux&iq4FjOf7F&_{2t~O|n642br;QzuJ|-l6LwRuoEx30s*uP184;K=980!H^F62$4gVSgMMee&`V+KUJNxrqglw(9 z^v@AFBvnMo0sr?A+Z`(f9DrWdt3|MsD^z*EuqfVVhRd+tf81g!R~3x>lI#Eeqqyaj zn~)-wZo}q6-#uvf5A8A71_F&CR zVVZ*_}HN<1@m?Z8Ve9Og}wh>@#QxavU_wab zy+fI<8qA;`sMChb-mGUy0WCrks#Zdw9VOcS@;*(Cb&^(!>m?_7tEOOgH7LHj4^Guq z!g+ik{kUFBtr(5yp#sX_G3uI*br>^A*~j^seol)wrYtJLT>Y|Vk$gIIdn<5*T5D(| zJKctL@Kn4K`I%w%_dCQ$cdC8;Y^WGx(rNgvaV;<-?6o$BahSct=9WyyGMXB}hu|fT zf|Qgrx)h91{DP6G`g{y^0@Qlj`jQbciK|{Yf?2KJ*pJu~d8Z}iv@$jzfBgvKnmy4Q z8V{0Ar2!6kEgUBq?GnyGA(fwjBaektw{388l6D8tfw3M)M|U-DGipA}AK42icSb1dZ$VSf8`=ODceUXS^?>3-PaP=D_{Y28dRf>zMHV<=O;3ug%p5(kET* z10WnV&U`Zz02*DJF-s!x)h9&v;waR#V;U&@ZU`9$GjdOUf1bprjrJLrun#boT1*IT+K`Bx~6k+g3InM!2-<3o)|QJM3OzPJG(s;_rb*d?5|~tt!m( z+{$x3`f{q)p`-4`iH9oN&dc>4Yb})?1b4)=q-i`{&6+a>_&HbetIq;qtLe2{#6a}A zEhhLifOCH8w-m9q`HRBDmZisvvIF4j1T?j3u61HdS$p9-Dp7I4FPe!;-GUI?9L((aDAr_7F4ONg6G*ENCW&7z^UMJ|t4rtG;`hm?npmWvVutD`nZxRWdw=PQV2nN2FqY%Y zM?bihdRcm+fe;fsE{+Ee)T*0zge4{LAB&vFI4gqcuY~^{T(&2VALbJpyd8V5+R*=O z_hP-WzB(u%~zz0oZH-}NG3jpbCC40^`(&#Um!n>gz32*tbj~tchp0`&jeFX-R zr}&cbmn^Rbtv1XF&G1&8wczCnn*%MLSA@goQoNk0sd6wKTO0V8sgpMY-~>sApR)}> zWGXTG@cQXcqa*dgBzy6u&`jV0pxJfR(DPjX8qssE9H%-&3{cxhdd^9Aw^lcl7fM&2 ztBvm;#V$f7O-d{_pb~VBrs&#-H?R}B0raI#O7J&zD#E z!u4ZpNcca*)jN%q~mx$mJbyGfCo23u+}LeQdOocqYhHHsXTrs{$%$b z-HT-=@(b;P>c9HCIvtV>F@rqU?lEA0okj=@O~Ik%8``J8hiJLGA_6cbU6H5Uw=-(< z@m8%@RM4ycuBe|R;#7}6ATOYQ8x17cb)E?e*H_J$_^f;kgqLTMTFqbZD^KAyo!4mI z0D(PrLHSt$T8;1LB(cOd$eK`T(#4BcpWyF8+pyPwkCl<{4b8LWe3-ybrp2pWqCnYC zo5mbra*9Rx=F&g8ACnzk8>irQDQHf-@>n!yn>BKuCOJ(=s3cv{{J{!G-Rtk~p7qQ< zpm|I=1O*I$bdA;tHCq3PHSX{kuH-1AhnV|5Wh*@u!@&#ArNMH$&R1U(&h;#6E}%!D zj)uvy93e95DSQfzYj~;4eFqI=eF*kRYr_?nj5Tq!qV@@zzqjkI{XA^Cs;kjU$Uvu$ z5sNN-=1%=GJi^YQA*^jbb?a zsh&@zfk3*mJZzYY{BN)`?`qX^_$y!hzwN)sFT2}-$vrG=l4yeW#i 
zwbkDIVC@_=x(JC1cdGW7SQyeo3*UD-Pkg;3-bM33VwDrr_t)|>2ikWt!MsK~Wp(_ltQ3smP%#0(89dn0v%o>((r@ zriy|)JyhFejpdweJO4G!qL%=TyoW$^qxmJIdG5G4RFGOB!~}+CM+xrqNI4Gi|0lTb z7I7_&G?PITDVAoa?)FiFOg8Zm?_&(J>LOoO@W|z0*!*~{YHue`bM%WQAw`4xtygwC z=00`jvPK9U%SPiTJIQNQ)@pPmoWmDyA=b8SKi^m^VUInzlMnOS1~=cC--_(-C%L@v z3j&lilDQnK$gpy>@%_9+-9|btQ6L?C*0{CPAnSfxA43E3`-|Os*$rS)||*N5-}Ltf|_wxd-4x0iY0JzbF~ayt^#v!LEBnH25hI z(APJCUku~V&1;JDh36R4N#nJ1BGnFJR&%4CM7I?^kR?x+p#G%js?EQ(=dSwDTI7>= zz*m~hy*Y%PFEX;1o~QjqU{yKTNcCGXC#Nckh_$DCqHuF8_%{caqL9HO;F>pm&b1lm z&Ml8?2eu4io2mDKfY$+{r+fm=pz5iQ)^QJ#^vfw9H$Y8sV?5XegC^Ry4Yn5Em);Ws zu^ei}^!5w+?0cZd^#m7jw<^rU)h^`DG)>|bhu*4o|3ooSyGZ#y(@Hsw@8F=v7`pqdTO8X7qO0)2*9nT09!GvdONTE-ZVY>z6>p@gLaR?9Z1QuWv2#y+8Ng6l zI4a9?OzG;!nzCYm)u84*1gN<{iIc#j3EJpTZ%9inYupo5!^Li7SSQD+bk4!4;~Jyc z#nBMj*15U=uHYYwk?Oy*n-r>dp;u9UA`(>Qtwt;RuoI{f$UoV`mBD8T81GP9kxrg| zqY3FdX-yuXJe0Rq{j26ND)Hb{bzQC0^WIXj8K)(8jno+{6_yjzn(Et9HQ%o8qh;$> z%1_-f--w5sV_v_IffSzYDVezm;Ux!OPe%IW?O0aIRnET1ADaDH-ikc`XwD~SsUAAU z+wx{7>j|Dln1RU_)UKjm-`s+JGBD!J82-t2kBjCwc%J!iwK0o}rHR-uD)!;PzssT- zkXJ-tq59m61)>VsXDr1kPOCaNxcdfG{?0Q!3YEHOFaF;T8#KCfeqy-A#aF3xD38 zyzAi9)gNFnJ4Zn=0AqyNY|c~w`A}?NJNOUz=yUdhInQDV1nUEG(FKPQ78|`mfu$K* zic|jiF86c2TD%Y^0EgGnxy<8{3$xX1A!4W<$q<4PySz4o+4LudU>8D?_=}H#B zS`GwCtk*5;lbZh-Kx0@>i0p;=U#|3$ZIH&_kExn?GS2^i->Vd;fEN&>cY6=n7Kz~) ztV1q}&y_{&s!%}tWUPLaESHkq-!-!nZ%%0gx}i7QbHyw3F2pOXOHbYwCsk-d zxPxgOUq~#Frg2NlK!sO;$vHMfKglp}eN|`{7 zz)?`LHO}5)=gbcT zwUX^9@PAi0d7pWkETd~w^GXz#*0K~j6j0CBRc-46+S?EQZ)WgaUKR|rd9y)@{FnX0 z(YFrEWyx5J(kvM~7PYvDX{g{D^vAvjO35bDG^ z!qD4wW&tYwh<$_g%lIa^)4#5ty8nGjYSpH-_pjBYX?Bhh;()yfXDkvv�cW2SF`t z7n-YHbPV!J43bUGr{H94{A5pAu#wAkmM;9An5IoRiif@1c+I0n;$Fs}DpbV_kZ!dH^wj3jZTQ57sg0OmnF*n1(zow^C`Z7oF>^F1sL} z+m1<~12#fX2r%`P`v7~R2BVIg6ALQMkFwL?T@Q!&o`F=luzrx4i?_$OTS8nT$JfH1 zS>l#y^pEN@CBW-6Pa;SFz1`2;H36y>Gy92hlYb|kX_wNRBh2JawXzheGui> zZ>7PnfLh9BlKf0x)*Ao-n|qw>e^~gN)+lXgYeWn+a6CDp3?SH|bJFh_YGaCiWv$8# 
z(-#vHLk;_R0=%7qzHz%+wkct~s9)Ki7){{gwgyM~{#qEiRidFaA!}5ZnJR+X_%|>!S$j-qSp!lLGv3i&ez5FT!|~Z!0q#@y0RYqu6pnhyD~=+cz_+Dd zGGpepEx0loQ>#(|CG4bX=)Wsm0cCCJMZWW(b8ZyG0FtmFj|<&>{|QDvH~PNsseE$@!SQxB-ICD5nSo}fDX z59TK?*!)aS2P(T#EsB#8^`*3SY(j)X|=1J(ZJmNjIpp>Ee0-7{HT z-;!lIF&rzGWpSM5OdXu9^4n2=pOHsU8z~kQDTJIIud5fH*TuWqtMHYxTcfu|dp?QI z^sBgr6#%nYmEjbvWJZ~a9*E7!%`3>=ONDs*Y9&IcCK8UsM^HZF*ItuahR}4?yD#q+ z-Y*l>^-&H6#rmRyn9@rPIb}}(%n%M}nplVP&%b!(VEJ2V|6BKL-F}It-c1|84%rbN zV5>3cN6LY`2U7!rOg-b;%zpm9vhagbH7avmXi?!0)QEbrQH6nek*3)TV9&&AN16~o z%_f>iG!Eh#QsQe2bFL`5Jph=8Q-JJ2sb)8q?5yd89R%5ti%SCDo~ry_zhf1fTyzym zxl(qg6K@p|{)@}a&;+C3qu+r^CbqleU!Ri$scIGvDp8{Z2vJX@ueVDjNt;A~+NOVF zgyO-b(ew5K>Lh1;2r~>u08afl&etzhx8O)sI!YIBx~P;5(YmrRT$$$5&DqZ;z`#wXojm&P49mNY3RPh+=S8kdwl zGWqYx{D&^!6)}`5B6+g%?q6#H6oRy9^`a8gwcgE{`a#~85Maz?6j)XUcYO1>hjDv8E`-%6b?fi=Q>(J|nCMPSam{OBu%3k8o++7rWOucd} z+4!uK!q7Z*FD_2PqTth4iB#9kg%TES;mE_VsexJ6EAu&m)LGsOSC}-T6Qbn^O2UaZ z?pdNe{O<|_Yd15YuY~sw;7cMcWS9I&Jr*s=B3+TbeAUo{=%}$P0i2`)BhDo2)Iu2R z-WLTFQhYQ|p=;Q?tV%*N{s$6RR2|>OG?CQ4i;d5ZES!!8mdNx(hs*qA|2atV7+4YC z;WHFJ5V?Gt+8a^ia}u_})|tsR=<|3jI0OvTsr7yBXKT&tZh7RJk3e`XU~-lC5)=6; zv&d9fP0eS;=v8Q1E?J%>AMIgP%6xN{K0b1@?tBaUWYU(qVJ~7#Yx+{+F&r+#5QmoI@rQAX)E<$# zWP8Np-?C*^v=#)P`hBx~Od?VVA_^LZ)Pzhj*ANIon{>9iYN5526%L9Y!B8sP7ivpl zZ^CBWpVCp>gg9?a=u2Z#u`er$c}Dxyh*MZ(V0eu@UDxh;@rz4P;7sfhYG7zgTxsZF zE2u+V7s)zq6_pLWQDIJJu>-Mw>48NdeIq9h7lVxkr18bg4+Z{fu_C{CY!dT4{mam- zZhCqwCFA*YtY2PfC@^XyhX&?>RUK+lr+=%_u6_cRkI$cu4U3P`_Bq|l;(Hb%)#`G9 zaJxWb^ct*=mB#5U8WqsD2i_QEGqBjPx~~2c(?#{$@W4prp4X*55k;`6{C1D{mdeff zL{F5?b0VGnv}Y+9j5JOH@?n3vdb0=ez_S^(9yZ`FcVpj*u4D^bLtTDOslO=AW!V& z=V=Yd61!`FY6kAQ3QF>2AZ;}7*2~apfuGfrNWXI2lk+z}>@_dBQnBCN+%m3o|0C{f zhqI}<{Au-zC+6B6Wqj5)VlDq#y){8+)+5xWv}x#6HvnvZ`Fsn+;_Hi@S}tddX!&0r z{;^_(|4RLL*76&Y?{l+MN8K2g;K(K!G0k1Vkx$h{z)jqlG=NP3|?T|EV<0r zN9(K*(SxI7@7sljY$X&;x1i_B&Qr`KTJa{*+J_FNJbaepZHX5FuLtxUOo zFJr4cC_qxL4!Xx~N3y$0puq08mx~a7x_SS%H+-?7-jUNGwD!?Co>W_%cfor5Z=Jgn zC!4+e)_TLiUMA=m^U|8kz(tTeR$bM+1caxVCB}fcz=*{c>$w%ES4lX4w76G&Oe(rG 
zi*Y3Uk)Bz|NT8UIHUsW|Qj6}WX#>1N5Ege5Z1(sR)sz&6P=4sEwz=f-+vho}&I3r_ ztY>h{&93H2IhYqIBe+JOGskxYs&&G<`cd${$IlZA6=~DFHPBc(eyf$dm+@U)V zFZV6K->Cce#BsFyO@w|cg1_WVcaogH-TH7gJrpueT)a7&R6sXjXmj9I_H)jxqAwGN;Je+oLyx@0P? zPZt3uTb6rgOiBj@*`|gIoE9UnuWKuF{iIBc>!0i&TIBU=_P~Vkf3oJiA9uV5jT7M! z$idxakbwJ%1|y=P^%gz74-{1X6m`5I^mK2P=0Y>B_keiELX@kclWX^qo0$y94?Zab*14Y8mwOY~|bmzL)0Ne)hSu+op@p|WvmrUo<^9dvirgbYtJI?zuU zh%9PUILw%p@A+ntezFeCn;RVKmrhi@lonzYq`5;)^i1M)gxDPP;ER zL8>*@*V)Nc{zCiS;p*>_oOiQ0wW<023m8De;~bAdY0uVQ1B}eH^H#umba_r#gfpl_ zx}mC7M-)bZ>7aidHy;E{G&)WRN*z)5cKMqUYHbMj`%#?P(T9f+6>^EArhsZsW6SXA z4qtUUmUM`&Y-i5gr7cblDWQAhC#Y64hmN4`=F4^YR&wmpmL=aF5F;y3nVEtCV7ha& zfN3%2z|mRCCaejF64^HZEtfZt{6|T-QaStkHwWUs0+HtJ>yohd7ja26qdh6EF^z1g zKi_(kHok_cR*rzZzGUiOlz}1SxcBqolskWDQ&NeRI0^5r+P(lry&c^I0$%SRDjFRR+x@pMGV?BKIjl4{&1onX?T_;=wW zPlWAWZ4XiXaysK`6TZZN4xnT~&02!M7vWC3TjH@f5uTeOb~QYQvZpdY-Vp;^L(~6J z$bk$|xh^tEDOmt~vT=&xxRS!&)&7cEg1oy?=WAvP)LnoCO{u4*lN_{c=$M)h^n`=_ zsa=b8nF3k6U^`?x&&?Et4FCdz7#m4F0M3NDOcUXI;?Y0=AYt5CRz_``&&`&KFXb?X zJ%Mr=Eqzi0ZCBLiV4ys4`$AcDL-LlFwf#P-)yj>L&xv>Djr95s(h3Hh@6Qvga{PLi z!SQ>NSTcGs;=SY1RwxtSf?esOHV5#b`FAs|d%yv>r^YdY5FOS+__Nxq=IzdjYUeQ( zkYYJ?rpSwtP|f*q)_c>8lB)jeUU^phWz7?u5?< z5)cy6RwRDQkmYL1R{?CTDsbMBK5H>FD8jGDhKUCm<6rYPd_4Wl$=v|vB06A+=a(Ah zL#W9ukNBd~<-1>^f4gI{3So`+|LT-TT-iFeY9C{MU6#psiuH&7+GeM_5;ZQ7^YV+2 zDYkUt;#|*aNjd&}%T(G2q#B5$XfNTZe1|Y!)P@_$#6Dn9*UDt?Y#k@RA3vDox+}%& z0W%Ytd;RL&fRl!&oz#gbWerA{{B;6X$Qr7Wb4qzz^xjA@RmE%x2;K$#O|@SXrt*ew&9_o~{>Qw*+45pXz2!;*awgBIn;K2Ft$d+ zKqIE8jdqC>Msm0qg}0n($1VjcMzHcWAts4tOkAY9E#`N$tXmqC&Kvg(S3NR(`r5G4|T2<-A zOqe)1g!sgO^>Og^s?UdohhUEL7NLQ`wpZo4q%QdnOkPS1sOKR zAQu2w6i7LsJbf;JVAY=fb+U&Np#p)mz<&|l#X1j-W!pgym;n;3d`@E0<8KCS>q&Gv72T*7*BzXjd z1bgiLaE{CoDSHQ#Pqxt=IIj<~#-5?kjoPB^xMm)Tv=cU$dI+xNlt#BSycTE2x=sCEBUo5Dd#VIp??OlK+4|8yKM7Yd3^Pq7Lu0Xv6-%knm;%KDA~ zGJpugI~Sy@Ggqg-AivxW8w2iA6^PH2pJwm|2h_U@N$#4zkssh_zBAi=055-5Q}L!p zedOR$)*Gz0dN=G^3S9>-6L9kwr_CIUf;Uor8kkYmzC%604p8<=SZd?g1VFT`gvR&w 
zVdxS{wWNE7$4;)ISdk--+=irwzIMpt3@iRsm300s3iqC^#xeWhE{+>E%4Rswz z4(v8s3?U{ehVR;GJ{Juby#7EGwFnA=KAO8CpWEyq3?wH@+y z`KG%|L+aF~PPyM4u`W&x=h?islM9s*>Uvf0N0!<0!cd3pMi(jt#HkmcoLt60OHUVA z?P^F96b0ZM4(Fm?&$NZ;-zU`!ysUI?<0zszyVlbg5YK%TykZktibu_#^2uo zJ8QLL+-kO?Vej8H>|;RI7)4(cgGS+kq4%aY28v`ccb13{9W`!t%GPbp750MVnB_$^ zs|$5xFpe=y?LdoI{S=bYUy+vbkIvVQE?!J^L~(e~Uv;UkC@iia5vlf!{Ag2uazr*9 zD@S}i=C30`9R)dce@_X9SiaCwLrwR#+b_`JSR>{-q%ME-pV)a}A(vNi_8qkkDqX)V>MwJ)qlh%VH*b1bhMXUln@m^BaYBJgpUC^1BIjeAr1C70-xAu zus+bqb-cOg-^)oHWvB^|FxP|}>B(IO$Yw(7=xxpFxG|N+^k3D@6K@K@O3B=>80CHQ#{_lzxMMZ%6swXkyrD?7~>%dVik3_5k=cKI8$$iV_b+oor z|7h$^ZPqv(s|h*v-MdGpKt?7=HgCZ(I+nIq2n_R;xw(xr#`Wd5#Sv>O#XNNSv@S~_ zkj(CzxAiv&Pw4q5LSh6j9zsD#B+!c zFR2gTh;a9DF~$TAB+j0qL^EWX{kC6IFIe3qzqtzKswpvD|9EbgA3f_UM8C;WD+N4E zUomDe;46glKR%Fi8p;p*?~0lM07@AvWUcn^&rTh^oME#oH3oyHA@w{^>%KBPTI6$f z0lDL!I0(Pb-CPDGp^ltr(Za)#w>0UP#Tou3*FB`CC58eWT1zgby;v6UoU40+|e zr#f#&Nul!p+|5uYRLO2R-*brJCJ6wRK`Fh(Q+z-BB$fgMpZ!*r2SxkoHC|m~o^Htf z8UeIQB=5MOU-JX^98AYcH0bjc{kQ1J1duGMoF+%10c% ze6sJvS-OW(tf82k$BueT(4^EupQ94)v_Ex-ciAJ$e>93-ydSHf42b?9Dm{C6+KoHy z6n5uTt-GSpuZ;pVu?tYU@=7DNzUsAPse9;D%4?BHx*RfDG+03ObT>ZKJjJY4pVZJB zEr}ju2~D9Zi-EX~dL|_; zf5z^4W$}gV&ktk73nl{}&K9&2oMOzS5~)vx8S_>vfOV6*6gobv@y+UK%QG z(Oz7OCQ#}ae^abUsG7d43vhxY-z~*HV^1C6|4C>ks<%!IN5%lhfN?V_INInMyT{8z z)~Kx{^B&fOZjN&+O5SnzZ{(TcM_c41OO?iktQ7v-X`))=)L+`7?Bt=fuhCV5X?-x2Cult2g!{ z;;k|#*ZO*7xU%;;u+nQSZwTyh<~rDvdw~rC&?Vor{`P<_fUJy{M+Gj$Mj3NZBIWd{ zlHU$bWGFhG4>sR?IqGEH(AUtiC)6`ZvvV+%gwTiR-3@DTl{ITOKthge9*!7F#h(D2 ziNJq}gHy5=yI;G~0g7Q1f)0A!*lT87--Dpxq0=HcYjYNec}+Tenljsp2S1)oK%BPY z(1VW`i3MFma$LeY@m(WxctE=H7dUv#HSD0;rR-wpb}-`T?CWd%Gmq%s{G>mc zVl2hv%gy1ZeqUz#J!^>I4TLj8GRwZw3Iw)W-epd`zU}hFzw)8bF#f|rcI|3hgB=Dke)*Ws~QX{N1WpSEUs%HoXsM3YLi0XuXMoXNi zf6z9Nj;hWPMS|!=5{i#nGYu;IEKse0L5HVwcTu<4I>(<6CZ%ed{Mjka2_{N z2W*G_n!C7Y7cHkHE*(lx1-81qS*z5O0OI2}&`gJTr-e``Ix`Fx&$Az-H>a=Slow`& zSJl;5GHu-g-9oRuLO%Jfq~k-}G-a=!awS%*AXK~_BQYjWE8uUU(Q0C;r<`}6>9&;Z zU}0OG&|2zncrI-%wk#m#nu%o!8rfo5?W2>_fvLFHdNt$Pj*cy;(S5`nnZGX>T)^r* 
zP%dvGduq~md;s`h(>OHvk|hSZ${NuYVAce^nuN{+Uvj+6s`WCZ3ZN|+QI}j#SH-Ul zz2`&5jP~nUuNxYg@J6uLXLw6nzR#tG`A>G<0xxnMSf*6HpQm(cze6jI@_HOa!ahxr z1A0TM72`X&Qg4CsIi_sp_Ogz-=Cv|qM6+PJA2;ZyN1KqCcbKRbxnG7?=4Px_%4SjC z8m!^s2j$^_v?vpPnEWxxJip@I`=KT(qFo)uB4x8Hv$( zS2JgKBHZEUPwrvtTKVk=@tseqz*x56UX5Nk$%cHbSj9Nf<5klacMIqwQl74PXZ%Nu zBQO!6_M}ZWpRL;4D|KEIUwpo(onIH3eB;Zx(M0l<#$#vW^|>3@6!xF&rfiHDv6UUj z5}}tpIMfP>Wv28=bvdO8ssui2KRvkC7k&^5<|Y0;GB`Al1uA>@!A@Q?YMlrjj<13}-fH_p z4AOCy9_Z;x_=ZpJjECvFZO6q|yCXtwhmS{)xU-$1wl znQ8iM!T2~6cdyZc47G&o3H&w+LJTYun?An{$k-YPMqLlH4!4XKdTM7$k9Zw_)iP1H zKl0%dr}&SD;DAK-SOsYK8qXYOVzWrs=&c4vrfRl2GRfChZ!hQiNeoq#xg z<4a0FeCs}l$p?6x$Hh|Xmt|1(6-FGNnjGeEzGx=XJfX9d{-=XU==#Uw4M#_QtriY! z1Dvc+fBt#9lN?Ph${J%0#-`-QmA~AD(MoSQ`U`4-{V+FhoLjpewd3zpITmeU-I|lP-Jov@94yz+=YoHjxXRm<$9g4iy^)7B0X+&{e(mn@9oZa z|3|0Vs7&V4;U0Ws`Jb$RvTD9|XfiH$t7vjFNx&p0QSB;Nu1TyKS^0CFv7OWvZmem?CoSp!s2Y{O~ZEz4bN_MZu;F zXToB+7knEr0@H_uwwW;X{LLm&8~2ls>n{1iAe^wXWB$*#5wSH#F2u%Lse|jr<8PYI z;2u)!?~K{%Ct`FWrH+wE^z4e!hY#@W(|GEYS0DR3TrEu7$4!RjRyor zmCw-#p5Iui!HzUeu#9`&9=lw`k9d53r0?pJ+U84q-7La-WmGWwUzX&TZ3WWh79PA0 zJO6{+%u1i7hj3W0mj4is`ri&)WF&YKk|;aimk0a#H66vJnD2Yt{$5j9;`h(*Dr<|NhUWOWprTSoeA%JIDY{)wHx}6O`;3wPbhmddDf5-(l@%hCYP58 zD$1n#d0Kqqwpw35yiF5l)wEqATYYDJ|>cRi&@|E zd;0xRpFVvoGw(dly^#z+WAdRojo;Ld91vr;luH?{<|~`Um@(2n^mzH9 z!=UftJ0K%q*k6HK*kpn1BMoJDyk@+Tu6)rj*w6}w&C4CYTCY_GVa~)(QH|Qd25cKu zop=5N1gQrg5k&+<{W$>a1|ELO$)KVIG4}tJnVaEGBOus+ahA8^%1I4!X;ZgIojMo> z&eVMM&u{vF@#eNG?)_;Pwt4weO`LU*eREpxF1U?4q;!VFf5FGR{4^FW*{Xu80O|3A+H)d0I?q$$YMxwhhc6?#KG& zRnHtpa;5#xzKztsX(8F*rfuGowMI_aOriBVOaSV*3v`tAvF^pugt4}pB}=+7H<6|9 zU~`06Mj}(HCVg!3zRs1|M;$(Y4_7}ZK*+-8Sog$hu(=&_?eDN@xCbi*Ws>F6=*Pg` zl^PgBo;Z`M<4%^V%@H@W^Fap*_+F;W^ra=@q|Yo%R?I&M!ge2R;=Za6g!1_Tug2Jj zU3~l#wixM$@<2YY&n?yOU7222|F9EhI8HQjPG>E+WSaN5OyV=402Basd_HZYV>%aW z>1*kzq~!OfZ4@Z&i&x1xQvDpfb%SVlgj6jvX1Hh53dV9~mC? 
z{1LdK77IEfKafc!g>f3CIO@=m51Bmx6PEEkHm3#JE#8Hi( zUbiT%lQn*C>`Cug13pf?8gGYXbjGu>u!D@osS$nR>-!Cyw&c5|Stkzqn!L1s$Cae% za_oExA|WH5|6S(B!`}LB7mb^I4bGLVyax(&H+N=CF6~SH?CYIFieyf2T>bs8=$`Ls zXr?1mp_Nu&Kd|I|>D04~SOWhWKTS^#e4@Tx4_{wi^QSX5rJ{7pZIW$9TJ-e%cE7hR z_(Dq6K-_|2dR(|3(v>-(U@n=a@GR`> zNw?)EF4mPkSDBT4ReX}>K=x2UbMVkxAIg*ysF-HO;SxVi_R}fRU`&jiY}!_)LoG04 zANYs~j(K@BBeht%7*?$IuC*x#GrNg)oOFCCzCXXJJo_Xj0}GGB7``~76TwvJd%!-0qtGjcO4h|JgR01Bbkc*9m*sF1a4JgBEGq2@z0z(D-L5>C_l2^ z_T(dtMr46h4`;G{ef?RB%)qczHPThEf_*dlg{Di)?C z|3rRPXC5NJaL8}!>GctO-3;=QjVj=sQI*VgJsl>Y!nqm%0rQ$L6lBE7NI!Ta zK7Dq<^Zf8WTC3i3DS;kAjS?zCZP@#$eAROXtvV`EnFn^C{n zQ!n7W`wfd0YYk*JGgR(#WxT_Dm4-AEptqi*(bzF0Q)EMzY=i?aKEce*W+66Uru-1d zA`no0pMpHSaYL1rw~~9V)YWQ;cq8qRVS3CN?l6dH46wa`Pm&u5oySObm@MLa&#NJh z<^{)b^!-Oc&FySumXC-B zH&~>+DeJkg94&`ldkxQR8S%#FFkSl1US3?tZU)NKvb`yN$LD}Ni3lvtb`PmSr(gRDviU3 z)A-FEUUz7VVd4peP3+V^lat;3bLW@OyD#zfd8PvVH&aIEry)#qvD4n%Cx0j*AJfRg z?-5eJ#bdkvm_*pVtL7l3 zaoJs>-fn~)e}S*5L32wr(BmkPDU|+#_(!j@2@=C@=;bIJ1Nr-s=nx})k z6kZt0t6Z@kATlXcY=x5gVrcq$5I5$k0q{J)$)QfT|L^-F9B@Av{A@`VfR~thjTtz@ zw8qhpY?1(`fr5h6Xu1CXje6DAIjJ2+{8=ryvSB8aC6KzgeDClI$V5pg(iQp{SV-OQ z(R^mG9&IX@5^_G$C?+3h`!BPYQ!dB{h|U9mtgw0A-J3Mh^Hi{8!091<7RXIfcJ6m~ zZHU307(rosa_Z&N*?(L@kA25&6jrfyU&--SG{T{arI#|lW`LI>W^)t=RzI%^@WD*d z*@}Z_d52am1OSBlQj0s3 z2ICDZ;_GM&qpGUo8fSJ4?xusfC~x&JYMSG@d00_!2h<>!IbuX#iZ}2a zc24vklwa{j)9t9z5gD&oK@>%e5_OmYB1a1L=xkP)U_F5vPI^AN0#W!iGkzs^c#=@m zEm$g{$8|6PefzL>X{Y$Y4p8C ze_tW+^R1`#8S)94*!IV|1PkA~R{4!$Gzx(Zza$n0yf(q-!C$;vp_vdO9?CCu58 zIIzgT^UB%PN0CE#v+oD@^^;;y8AXBT+eC1s7q|p{IVK{za2IHKeXo+@j-&i;;l7IB za343n@jnYeG~&fHyRvg7uWl1g(a82MK%>IxI8^>LZFKPd@jiZCniW$oZU?{Q*9L5=pGoiYv<_v;ORQN z=Or}APlzMmwFa9P>}x3wj#2lc6QlbHmmqi-oIdxG3`0(<@_L~ zelW5{707Qh$bV^erNFbTjTeVbUU@>L7<1%lcWmD9>uE6H-d~;Tb_v^*SJ%t{R6Sz> z$j3TSq-E9G7HC2T8V5`%6TdgY0!n$E#ibrU?#jK0LcCuiuYKBcR+A)ej6_wa3MiX6 z8h7V(v;d*xeU$w4r~g*n2-M7rmRp0pebkrzM9TH-?WwgE`b)37h3$~c9Ze#ZD&E39 zqC62a&wm?89R%V$d4S4d27pI?mt1)p;nRC{WrktDfQw!;c(KqGWsyY zTF=`D2*vV{tEOO;BISonFV(AxQ85!k(_gY4oI*DC 
zPW112gdGuMn=(D!NaLr%S`iw9I_|{+(o=9JgzR@{kdn_Q^R)fJ-fA!OUI+x$9s6?a z(ITJHz^J@AB|E4SNaBc^0+8P|1Aw*h(olUzrEFcXnJ_h>z2!;K?8-^TB`zr$NH-Nb zxb#{A{rvvJ;5*tB5ON0`z&>l~CF{FUOQ0nEEBMweX@1gV3^afo=nBF4vaCK%^?x6Q zn;IQCG`X&4$Ba-SWMcw?chQ}nH0B_vt}Ej7tEYOGoc3i1Z3VlRO=&yXLys(1 z%oUGLQol#)yoC15#8BQE+i#9`zHn0t-sGmW)Lu!BGNahm4?J83L!_EYEyEqp9pEcS z3R3=DHkrD!PS$ch@K>|#|O8cbme zlgbK0U9e0t}P--pv}@zB<*U-pTJlqpc(V4{XVfSTlJrXqDV*sXV)8Q zrVOhFn`+r@Z;4$;$@jUA9ON2Rx5fMAMic+2t$pu(>WKB(;sDlyZ%oh0BQVP&8Ub9h zlU)DTMHDI1v{6b<5YiR83{&xjM065$ONgj;bQNzL;kN?MB+RAi%rR+hCQ5$@1mxyycha{$k(yhJ>IU$>P}MyDZCd zl=ThgUhFwAs`YJa4F&pdl|CiFwjlwVdMP@2THTpmM?iSoq>-T(u-#i{>d80n!gWM3e zf2}25`F7LPz-eCb{bttn6TYR-;%hUS!hSTe z)-F!ClQQs*&_T*wRQ+W`gjns~T-+ZeWh(odi%vhPtbQ;lG`U|{!W~&szlD5wbjvnb z$12Q!f0Zq$j$*gn1l$AVC*!3U>62{u1BpSp1FR(fTU9zriNfZK8ymKWt(Tr%`&0_# zF!G-nN^@+3Ss|yJe8N@|$$6>{&4dRylmb+S*GLbz!$CPZ;!K@qDud}}546x*qGNZx zP_2m0{T!QgugFm4q}Tc0V|7Q2t!OYmuU_@&SN}w}8~h#Pqgptk;0DZCk)KhWob?#O zFBY(V{I2cQC`}}@7Ti`p_jde)`l-u^rN8RAO^ri-N{(3PB^^o&BLYdPvSY6%)2~)n zC4}`}eH2!(t7lzan~|rna&n?Zv1BsF?Lm{_J^PrQqNNAmd&%_$6dK#2y83AGa=?7i-Dp`-3K(o&AQk$MYIHpUV6nn(YaQ z>CwH2eD9Uagkv<20TlNvYLG1&a%JEG6~$aOJLvL>e6X5P?*30m#7FrbhW4!@X9FnN zUhSicF*uPsZ4E*4`}b|HnVoy1j&;wmnm>=h_YPLcj)087EThZS-9<$Tl3APKX=ocW zx?||uDRra83NMOEbo=h~<6%F2{*s<)|7kh&n>dLaoq6UnUF7x)O}I~{41bA6*2u5* z3(zXK3u$0E)AWxvl__)}x1fq7(jG74LqAXO!;+>eurZ zqTs#gnL@$8(tTl~%j1lAIkxFBTxtR@*aons4%OM~eOqQ09~z)8oO#(CIxqw>b@laQ#dE1pa@s+ds>SS4AGiH5q z^TE>wn}k;`g<;$(-hT+edvS>(&@bGHA@op7HFA&rw`x018(SWg-$GN%oij|*}1_WA_|hS)A~kz3&gh_^GM?vJ_nbJqKd$6so1cSE6~{r*?aQ=$Gl z6YHASl7}yzi^Zqr<(0c%92s$p`sdjYTB>zfP``T&H&WtB?N(u2?gWeK_I!*uX58DL z30cK6%zsS|x~lBujc$-;b2RRNrGEQk+_Pn%Hj+ULHmjq^bCo)}-GHQ0aW=c@?Qtk42@H*@OCPCOrPS57X@T z}&~G7N^;7rc=u3kM>i~4@@A)hhnS`_+1^P> zOInSw*h~?$iAEM*J!qI{*bJNt0)XZE@H`Y+hJc>vE!nyPmnCzV@zGPfuEla!i~bPf z^3gj9;2x>t_OJ}puN^qXAGeEbYtZn_TYmR)2u?a&0cHpJ@_#=0WU)B&IZqbt^?B>E zxD;;&&W?9g+mVGzp7#ZFlP3G$cp=0AVV(6TP|&-jyYq$Xmj 
z#t^lEo@givE`9sH-_~v{jAzp?i$1TYd6u{T8m>%gX0t(ApVKrQ5b!$b*W)c@g0^?*4>8mgwx$dbtp?`zmo^i(- zwzrGsaJ)X*WgC&q-?3LR)$s9XfO0UJ02aTe1pffLAY~H}sP5b|@%&W4&@I4@xvU(QgZlyJ#{mbVjz}hKJ|e7$2p;e;V#K^=}$lA z&kzoQ_G%vVdHYF&u`Q%RlsIGV_j+&f2_7I+u+)2Vc7Cj@sSyXWQ_lt3@9kRL)ZX%w zo^u}xlIbL{scU}RIZO2A?WN5Vbi}t`fmd*bcj76j5Lj}ZaOA2c)EoXOKg4_9gjye7 z3(I}2NIL_VbM-*y36zDZY9Ng+T*l-{3A7qhv6naMWsRivS z;{~B&8JlcoTppCM`(ah4euiGyn8X}>#};h#x^}8g4bb#5x1Ya{`Tl4eZ5^np6WJ0QIRVi2 z*iA=-xv24-{dxPeP+>Uxy2^^>^Ezf8Q~Xkt)T~~K-C+lx6$%3qt!BQwLT?QfI?kY!pj!e**hM7tCz0w#|VPw z5$TlBiusclQeX`9nN_fZ9|i1A6jAnLy?T{>R9fDr%qrSljDO*l{*>aP{x+AF-gWrWIJF-avke zTlVEn^sg;Tj@l1a+1*FJ84eqOR|nP)C}2yR`iv^JB7b=V7Ar-Y#c%gq$kwQl{VqS; zw?DAnt$XPhDT%sS5Ym`(J7!(g+k-Tq7QBucD79n18pVQms;ZQ63cNObhQRc&<)K;1u9>y%2bOZ~PkoB)+N@Ci0 zF}b-N7TpscAGd=uEnBxVXA>gneMzM1lAvzz|Ryh>=BH8@_)<7LGgSZ=APS?9d2t?SNbc|gEWLih_GVv`^K7G? z&HB$KBh;K z9e>YsYxE8C8Z0t=PxS;_M*N{AlO@IUY!5Kzy@`fW14d#}KXzQpP@q3*uY*u+ykg=@ z-Sd0@0<&_zG#i9a7sqo&=%`JZLlBjQy*RtXJoZy8efGj+Fnw{MAcO6fQH~PU*Gml5x{s|0e?9XR%y2gEj(ibofY9{sq1B} z4wNpk1d(wf5WJ?%dRocDH^;g-3`*pE_Mjoo9S*RsCDbdzTFPVZSbqh}==#zCr5+tJg&ccy_)Np^h@Y|a zq#s*O@*VEv2KI~O^4SBkBx5R$&7b``orl_b?n9N_>Y@l_ClbB0_cm@aL+46PeSnW6 z$4#r_-l=;$5VSFELg9ksYT>)u>sNLXNIY(M4}ZE!oc-YYt&C7Hh<{J*F&fh!m*i9< zms2{0Trbue)OO^s*x_^R2t%WOJqpX4ZVve!5vb=4bao@1 z(DF7eH6BzJ3q(&oNiEQAbU%+K3ayKI>oG&n0+lh#SIj?`=`1^$n+tx;U;2*S2rYnE z%ApQKWm^J+Gyv|Qw_bgRX_SGUE68M}AaE|0^hIpDPoHR1>&m<6SOs>I8u5p5e~Vw| zJ0V07T}ixAQRdhg10rRcs#f&d>!Ug=VAPkfde`(sNN|(p%7Y5Z)@XW*sy5?6EpQ`* zTL?yvzypKDr))9rw688Rdgkr5>H*epH_izXD0c}Idc6E(;g6u@=>}+R1hi>n_=*Py zLfLwU@!UO<7dZ=>cOfN2YYr?9%k6;GiaqF3)>ow+mOG8B&MyE`N^E}Sm;!R6sC;~) z%EfglNaUgVE*cDDC!5_H&prnB*mG3rV;85mzBnY$JWCV%agv<_LHrcN)-~@?*JFVA zPw^2@J|&%Dt7qyJsJ7SJ{jewGftzKK3q;CPgxTi3M!T>1)F5`q*^&0 zp3YIsR#y)P$0+^8_m8(@<-T^U3P=%?pZAHd6zjB$#P3YQo6tDo*lYlsR;RA0k|khl z80$K5p>2874>099Pdfd-a0?ABuNYVcJ~1Tc2fy|0txwMy;iBoZAJp!e0H8g1sp}O6 z0Hk}Q0cn;Kir5!%8(ORXa2?cIS^^AT4wq^C>cW`u?WZb_m;#J%nXW)7 zFjv%o8rY$c^KYR2Ung`hlf)}L@Cw~Mf4%U)8&{6nBzkW(9@Mj70?s^_Bc_Dx!&_= 
zkqoR|c@2}u$J7_>mW*kPdVton?oZWlTE*9$rAug4cIac1l`b_bSmGy9s7^BMVs`ai z-XBw2MHh0I6p41rTC31UmSDk?BwduJnKE9e)JtApq(O#Vhlro+x2x(1SCzVBXJGIs+bbtsE&2_=ayJUssSkJZC#V2E3I)bmr7 z_b7OvZCKi{U}fq0npKRzmf6K_Kuh{TV-VZ)sgk|rDbIB(7U~hVe9PQZ*CYJ2=XiOa+(!eQ3U0Myz(J|(9@4Y4L|X)5s4X-X9Anzpsj2c~ieR_S#r*8H`IX8l zQb3YNr%+XpYU%@q!5aL!mn({2Ymb8*($wQjlRL=*BVGNXuWF%)ijULeprn;Y29C1t zV{l=-5;TD*F)Uv_lV1T$1UZ~qA4%|Gk~944HMg}c!?Q#{h^s!BbOBvR$3J*Qx6jef;i^VC)tTwi(LRpn^}1Q zzsfNK9iK(7O2u@c*&-D9t0jx&|Ngf32sP!F* zJncyv^JGfrBI3?siSiDrllY-;wu^DIrfbWCNS!^)i+g1!GtFq|ie2zMx=uxL9FPDA zM$Jm`-(y2)a^!NW=mAuYFCE(9 zkXdcem7`mbEI*M7b;to++-ELfqI!#Hqou>TGP~YeQJ#rUME4f!yRdc@FZ(86vh?5{ zVHTb2Bge_~o1kQ5OI2_An~#0A(Efr=%@seUNUDQN`>SdzWZKh%#?WM&>@*G8#|C!P z`(K(4?+VQrCnxx&g3ZP`zXz!6%hBK!ToKYFw9}N33vcSA)G+%N6C$@BGli%4jkh0y zh5e&m{>+thu=ZMO51fVMcZuk;Px^<%ciJx;tK`{DX2WeemhAi9dbMNe+o=N3c^#1x zbgIQI^p<4zqd1;~$)};f$I2#6a=gu`DIsrMnHJK9s_bDFF_IyKFZ z!f@ssJ`Y=X`9}C6H|br_=iSJ?3(>h>OvV zU);*pZ)|b41OgNHLY3roC~*#)@4-g)O{1rGj@Rp!^DY_hEgJ#hCZpphOxMAKWZAFO z{izVM&A>#lk~Kw`kbw16C=I4t?YeX_<((4-rb?IP@xzzM3K#=lTmo+5a3e-TNKuv> z_SG7&>bx9y`^jO7I&gnOf&$Lk@90{<+B%IFN58!K-zwz5%kaiq=26*+8NAhv@L((@ zp#m*-1e1HS5O+tUw_Av&(TDWM`;$PIby>WHyZv1MZbgp3@J1tBb=Q@)VCX(#A1sP; zkILRxSr+FX(QU6C9Wlgih*bd_)#G@#Z(xdSM9B-d*j?UkFl>KV?!i+^vi*f3*80LA z=5v@@20uBZ?n{|>cbFv!|#EJ%-- znBTK7UYV$TK(DN#O40mymsxAPzT&{z2zc@0;92H(OO?cq#<#M&W zhul|Qbw4=!T2a?t+BWm9mXKHHx2Lbs*PFJ@00fzsA8)s=mwf1J8JV3ZG|)$%0gfXw zX-cv*0L=yM+5M$;83Di4<}B`<%CQ?C9X)Wk#ij2@m*fNy;z^tSyt97C&M{`g!c^^z z4PUd8|Hz2nCL*tignqZ|VE0~Q2wbrnx#GFLm?PC_CSY(b3@<}R+M?-dNsZlacxQ?4 z=a;Bnb3)v3anwhCYX4(u?Vt{*_+1eSk#n0SDAFq4EFWg^!SS&jr}gk`Z_VcP3D0wz zZ^P6bpwm%WfgG5t8J6^*3RL{z{om4e{Ugt9d)R7=ISY|bfAd+}wiCtjg}IMc3dBSq zOo*z?y!GFzeH8+4`n{#{fYcA^ZM)z(YM(@dzX~>?S?yMF^qdNv=P#d0f1j~><>kq< zCWv)X4-N?L)I0r0*M+Ta+zl<$(|iBqrwv$+ zMYI#*nlUGAdv+!idKn@|O9g}IltE1SrLgQ+>M~=gbOBR}ma9Jilg`vybpDkD_ghs3b;VOT5va{` z`i=H_y>Gnr8+@SN$L85Eo!ks>h)mt`CTGxRd9&)47AP|3rrp_-s{g~s{?ZX`(@Z!3 z<^~3^n%L2U%ALL^Xf;}+ng8-g^3Bs9l!MzGm0W7NuwTqIbK>8Y^;xlWu%#7Jl9cQ 
z{r2LEzZ7^DHPtMLrhMobWkLRlC^MEd#N@jtxcP|m)v?7aKZ?xAZ0L~q#(rZ1Ty?I( z1hK*WB1VJisi6u|Ay~`p;+t=2Ue2MP@yaJP`as-uuO9+b^b5hD^dt|Cr`Y!e-hj(i z<~;r1owhG!g}9ih8rGR2>z~Mu%8x1@)~m_UVyiKfT2TGrUYzew6m$pRUW#i@_Czpu z%n^2CHv;7@9c6AuDgI4{FC|Fl*RErDXS8)v(z=ZNxlh=8Lc=R!v`fF%MPRfF3fem$ zLVdYWjKkTQaJ58L)sHcQ!^c@mYoeD&u8am-pQtNuE@E{PhBSa>g3peg`;UW|>xH}m zaVEOw#T>lB?4*KrJ5zd-dqO^Rw$Q%fb&!Td3wEQ$16zxtnH6{_R;S9(AFe!>igxTP zw(KiMZ}m`ax1tEfR7I|nNVH~tCT5D3sIw2RM~Ii>pNJ2(SdGTKh(-;N)ZD{W;Y*jjAqYhpB|xX?YLGK=t+mLkL#{y~Xv!R)-tP8JTmud2LZ zzTR(X^p5)1wj@Lyt{@aDGKClsWdXf_eh1oog*$TReKn=nmrw|>@JK*lf}RFn z%u#Xmy|18^_G~M~@PRPSN+CR8MCLnjr$zInU{C`iz1X6;?1ZHRkY*o1YPDY6A?=gX z3kS!5zpBM25XButK0Ku%5oIs}oq|!aJw!mE=I1vJ(c&xq^34r=MarkV>B1TrmvdT; zgsDoGN=bh(Uf)5NNu-1#%Me{#e3eTpwR@?<^qr^2);AlF8vxcfX!?{`=o`n%3N13S zJ!npRI}p@0^22L*Hk)*JEm6Mr{m|$Q?>1KBn?UmeAHT{rug!{qXY`Nr&iI&iAEWN{ zx);*ZqIsgSZgQfJCfnba@qE9d`}V*cWC*-8Se{d3xv8zpzR!MJUE z=G!~#hvQe?*;=)Dyx3)c-ju+;k$m&}drO+zi=n%wSCY0kYqRZ1PLQ?*@)Et8F0Yv#f9uw2g5L75=psYy;rNYG$n8^2OlTm|m)ysCUuX-a2>-2` zAHa&RMaR5hulU9zVi;3PJl`)E_O@PKa-3Agsoef_pqyuyAERNf3pNOE_kg&twayiH z{0&fuHUC(X6cw_FV30Fmx0yg*x5S~r=g6I2qxA8EddTOTsi`;@&mG{D7}XlS9Z6B- z&igu9o0CO0Tiy+9h03e@A~JwX_qtxZ#)?baIVa&uwN}SVy2d9 zZ;{UF9uK5#)-!r1TSqzd!(po7_tFKP(dC3j-|)kwgMKEcOU$Ef07QyuE43LXynCu^ z)P%5gX8tU_9BQCVy>~^`OP_YNRBVmmK2L4N2P*@F>3>uqKu&8gOCDZu1>Rl7C2V2w zCZ;Xzl%Whf|HxVt0MN-y0q6U&l7QkSzW8>39ve~}teLVhs$Du9uzLPhE<=A{~u;t1aELM^v6i}Qi;L#RIDix2E^^h5S3TRVf z$w*mHu)SZY4eO~rG zp{us(kpmqgNMWy*Ln=10YioyVtG~lF|E;>PuLW9|5lPdY={!k!M_7KmJFu-?CuWIn zQ@HMyN9U&0{k{w094a07ThOSLl-qI}9zpdkNqI>Ct> z&*EXaT-UGJ+NytD9(j%Ua%$u?plE2(XkisU(<8W%*``7>Yb4owdk}tXSAxA zz*w+;^OURjcNtp=ww=2LAcY^NZ)L(w)KlypHxy65CzxitDVO7OfKfrePcVDGEL_@# z#+s9ITS7t)3$%5@@!;K7P8UBxXnCZ zW$=KYQ-90D=!OrY+0){-{JMR=J3N4MVxhMMc@ik(!b5YhULK)qX2*yFRJG`%+IRxP zS3iRib8Ga<-=_%lLONhBXTtnxJh&mL4rCEYemaWj~> z?C%+~@Ys{XPrVE*?c*DE%x_rHZL~6i?{-Afw>+yoTB)`+om;7P&*aM4Qujt% zqH=N*rD$^C9$igF*O8*qz_9^(LsQXSSE(~#a^B2(&va!VSHqx1F=d5Pw3tI% zp1oP3>GDvL;|{r;VxiorCr{5KjZT}Yc8=(W&{ 
znAiSxvfB@owA36s1s@~mbNO~;MKH@*h7u7bU=xrT zo?(0HOAr5B7w*DsC>m#YmdH8ZuKhiTzE_eHAd$IG)3sZynlJK=E?3q6ag7jVsfZ;t z4&9S=hyS!|^Gd#O)}ETr_8E2Z_+-V`eHM}QB5Io1!=ZE3#`GYXntBuVdrD4iJsFf#d_^h zan2xe30Cd+ZZ)@j6Pn;S25hpr@uCU+EqZ%zv=E|#~;H_hO%g=S~;wn#%xnJc>)x$A7dvvhrMR5g*<0tOceoH#4;X3 z*kY+rvRi3{60BEfq-3A9VnddJ%udx!e81>Qpl4FD@jjV-=9uy78BtY^p7s`v)QbG#^=wbWOnsLAT`owKM1HD+AzCq+%fGK{IlULH&p-uGt6hgp1DZt ztkcHK*}o|1muC5=(>Ww{J?^@?;vh{*QhD9QM(VGGoipa5A(b>eV2&F<7S%wVhm~fe zs*2T1I9HxyvwOWd7Zw4`@F7{Qmvf{~087@*Jz>XU8|$2Xxb%dDNcV0KCLKgY;)Eh` z&n3QbNr|mKcZFpUzevIPvavds0euPsbP6sHi1i)H`k@ch@(R0iz*|Th=R_wnG)WE0 zAq4nWxoO<`!x6ArD+|8&3DD*DN4i)v44KEr1aMcJYZn3(c`R?gCemgztZH-yvOgP> zY=1}$yCMDbM8P+SyXX}!eilLT9FvRx%|L%rycX0sj8!#g0q3lhY-pauWnSu{vTg*C(-h%}|uwCk{n!&drKN|#N zvX1~Fdcn&v8J3H(!kw8Uxd~mzqwNuD!nHP4&vI~q*&~;Y$@-lSIZn=>36CdDK|b-T zsZ{_ZHT0NPC8_fYA? zS#ipSN0uyqtxmxK@?bx3C}e zUc}fU+H)te2J+F;deub*RV6m=m*!wC_pJIqmXQ~6k@7McIp|5m03!)c8Fez5z(YRB zM^17{Oq6`fn;6|PIcn^}odC%Nhe;k?D4M-s)@|Mdo06sd&l$r#3^1c`f75gR;rydx zqG`n5E>?;fuNZIT{By1Uepr;S*~n!_3(B%k`@dC(bi04JfrC*)?6#tINe~Pee!8x@ zm_@=m5uRCsAB2`yl3ny6q`oGtw*TE7Z4A&pD3+?^sy zW$iH!UFHD{kAxGB2{!2-P}GG?1PmH9)M&FG#z1HuN4&y0(cglonwID)OlRVF9V&Rs z8!2-ZG4@w-NPHUUtkpE1bDp->B1Cc|PL8jp=fbBShfHO?n#@wKj}q8Mu*oR$ zu1Hbgt(A1jR~^V067u46>cun)_4K<+UK#c{69patDNM4AJ_|zdTp*?h`%{j2*YWSS#QgXg{<4K8%J*hW7Ct< zkFch-@nCc?OqFbdub&|`QxT7!gLx^T?ojX0-zSS^`_Ysk%Ox6yW~u$IDE3m@;T$h; z(R9$z?5+S?q*`H>`-Aud;Y0BtKN;2|lb6k$pE0KS7Vw+jbr)!8G`xJ|x2fv%dibf8 z=`aPlKa1ea)lSs@Nt|fjOS52!1#F7GcARVLVdfUe(|#UJp%nWn^H?r|i8u8tPsEN9 zzb_+o)xhvPx%_a5E3THn=ggA0?AMEjp#77W38!EGTUEfualAld*sej*z>FKg_PgCVeYsBP3!uy~hoBL^cd2bH2NEPeNm2Aj5;eRCq|BXM~9 zXLkG%!nOZjwcC6IF6+t3#7%BG5k%x`@g7$D^C!lU8Ot_Fn#k3!PlJdXxJ zE(?5-fYU?ymms~e#NR#w=kn;2VuJe7DH+D7iI(|%q)v(lb;j43@=bnT++sz49?S3o z|Bt#}pfvs4JklPP$N2$u;apk8elq9AQG?RYny~y*wok!GfUVo(r_@ch&blt+(55-k zdg1JzYTXvn+G41}R^N;!R|GTH2m#_C&KLmY$0M)_NtAA*tJ89?v~)do(i8BlRXGC0 z|D~wyX-2Ee9G=ZT0!x`^Ngd@$$6z$){Yi3J&pT!n+$+$AShSYsg>hBlz;M8g7;-q? 
z^V&2>1KuQb_P#j2j)+zK@M9SRkx)(XZD2b$gUX+cebp_S`w`q~X6XDmJV-6j-jFLQ z9m8yM+}C8`={;H8pY+b*W|*kWs~zQG9s1f|#fHSwTBj9USQz)9&0~m<&gghwyWC#+ zI)4Q2)!~E19J>|8AIH82J(B~mL4uY1Qbk#kL%$rIC%?e0tM%7HE8%e5(0E=Wz^DUc z#tLx7hkTBdH527c$Cx^S@L<5KYuj1O&aQ>rHd$kX`{z$8uWIBrZ0DFYq)%yf4hkK% zb{$V(S7*)0U;$02-A;Oy&Ng5DcJf0OiwRv8vqDaXb%5F8*64UqG(#`d7W|Sx>82)9 z-6x*v{0gt81+!^MzZ&$w5TJP#5%h3S8CaQ$?mJdz03~=-aGCQ75%0M28;TP9%|D!E z_acA?lol7;^1u=e@lj%DcB;iWnX2L&`_lCLFaJWql}QC5A?}QA4FSp;D~*n7=`ZS# z?teuBG3@drK6hBi8SZ2pe=}Qeo@2v#4ygubKaRCa5~aO8Cc{+G^Jf+IJ&c_zJwLsh zG2)C29~P>Sn&dn=B6b&v#E$zY9wEp9oT=hf{mwa*cx?=Kx+xnE?p#TDUd}obOaP&y zenWW|4vGwZCocT_-ztq_Vf$6KH<3nFT`H-duZhk8)gIAKhtu$>c{1(!+fk2Ms$VUr zI?BxqjZXvIo-9fNtx)IZUh*#gtx&8-7b5F$`OKuDD(G9oj`in1ayA_NG8 z5t9Is5l9FLnYX{=_xD%p!^cBN?z!i@&U1ohRGIrQ_%9&A2=7>}??PDs{i+$3*6WSB z5s(FdZ$(K7ij%SK2#Edye+q<*S`Ucf4%dj#kLp57e9TGJ1XI#hMz=pJ+r-CZ(X%r_ zRE&kH&GnQQRO`pKi@qHzTKiSS$VNAQF~&AD><}Z-N!b7V@nw1dU8nwQpn3)^T)l!L zO2PYIFGJyKXK`v$8 zTNog8fdKJ~m)v-Xskmn0ZMX+qovS>WD_HMIZs13^G3HBFIC~?4JBLDcg=!L2kSe1D zkln487@+W6>pAAAnym=>vkK$p(Te9kDUPt1p=A`%U&zCsu9j^PIy{NvlV6M^U4$_TfQ+Fbt%S9Y+e#$oj5hZxjb^uDe(8^Jl7Lq!Z$9=WITOxN?+(w?+ajN zDB$YH^+n){PtSR2!MS9yE{0w9Kd#yrx5KsE89?3!H2EoZK2Q1h(8W6yGc*A`{ledC z(}rb^G3U#~_J8DF8d)+R4dVYCF~aJH zE9%?6^exwp&+5utosoe-WjLrcO=;+SgetQ_rmPap#6L9rx%r{mj5yqpx^!UPvSpw@ z7kSh@WEH-U7PRt*;QX*jQ?3DgYWQ}!sbcejO9NG?Xa9kiuO~BOT)#&QTA}*}j5_LE z!M&{Pzh9-Wq$0l(pM$(1+&Ttl4C9&vP`z6uNka4(9-)J~Q@4t09^&Y;Ht#y8LzxU+l z>AO_7V|yTxTb2!xi%?88zB+QvH z`)*g7DmL&;$167W1gI~^J43Z51t8p^tT+IJMU7cyiTp)%F>o-GQSUqF=3X-%aCG6g z*V84VPRi*1hf8eG58ZT2tN#B)v_+$+qes{6KU2w|wJ3EukMN2mf8|?a4o8B!pM66X zs9eFLzQ;nT^FeGpoqFa8=N!*G`JOQEyg{YSzR+8#8c+9o=@pcT@BO?E5X9$vT+i2- zDWT|A&z-+byVrdhn>|v0IP-jV9GFst-tO{FxS9#i9s$w)7||}Qh#+sYF!nU@zmRh9 zXV=l_=rW^ctcvpBVa0W8xSC}uL>uuL?G>(V(kUVn+CFOUUwLy+_-?B-yRe|h8~)Dj zi&^)t$IrZ22LP=M%Y_xeiV+S``xLGhXm%u+;2b0F?%g5!v{3u}FU((mg*?AXvTL;t zdJ)LjM(}cu+>m2XoL&&9W+bm4sxAAib@=vA6#8O0)_qZ>A?9<{dy8N3Tbqf*A-^$` zfL){9=bxaE2i@%P;-7=Nd{NW#kap5}mL_3Sid3|#+;v4tfF!n@t3qLcg 
zef0TZ|Gzt}wmLWzPG)QhD%S{aXB`;XA6A3&Vh9c|pK793(#O~hpgzpSTKQLE!xJD; z6iX}@V(jH-f#_nr*5Cc`JB;`sB;UEe_W5QW>6t;?SOI+uFX3CwtXnlW!zpcH@3B!^9_??OBX7;0O~$#|;$Yt2`=Q*eQ!AFIh|tBR4^@I8`hpt@rq z*Y zGalNDoRc5weTV#SIN*Ba*4?4~h9yqyW?f{08UCc?kQEmFxgpE4yCLp;z|~_e%JIxI zmM+7`+~V`~3_EIHskb3Pt1mlk@HU-ykjv-C6G=74E_50|V<8b|o3;a!RTFT5gZQ*p zm$+{_S==%zS2cm8yolD4AFz5-75Fo=&fMKyRdn8_F_~Kh1uQ3RGlFU)=*f^=Mm4@O z+Q)aH3*Njud$Z2q0GRQ~=#4~yop)f4?ZVIe;(vNxIISfZ#dQ;uH~b8G!@}h26*-|z zGS`Tl@gH4L(G3XYO(hsFewnG84!;a0v&Rp69GjYI00%O+vbJb1v$MJlRj;-|^~O2! zhdiUsJpU8?sdN@rH5Z6RzDxUB3CyzZZZucwUyYLyx5wRT7sCBuzi?nw1FlOgHa5 z-_CkR{dyXJ;{Sn$wpjH~TUS-{fD$UwPlC?{+d#TKVJ1J7xju zMZn%3f3}_ukh_`ezz(3BYIge#d9gUHd~5KT((J|O*N^TuBJW!*?CvD&LXT5jI5dCV|2bEIR&GQGg!%lVF($U6ASXO8nl&-*~j zao6_%bIS1Ja7FrTwr}|3jw>)d?wv5g+g+h@w4p>;Zj`?VxPF4;5*kOn#{*-K*T87?>Us06p)UvKdR#-aj zO8m^pzN(|B*WHuY6820ILv8cb=sgmB3pG+rlE#Bwqz-6Fezf>RO3N7YO?mh>)m;(o zp9Bs;MLv-^u^stgBUvFxcF|dAq1SIQ{2SZmC(PXlch=#S(o>~}GfL>vZcTF!bc1#t z*F`33-@R8f@d~b z6%R@&z^)f0gQc5)ToV+1KQ0VlV>pWNUH-qh6*v(5 zE~+JyKEC7lZDVvG-X9cjNh^DS+TtZFdQxayGzas4Cs4&Ta{-?|+_p=s`1Ep%EU_Vt zBu@mM!~~PicS#j+uc4j^pvSz8hVM2T$vdyyZFbXJz$`U@AVo!)$rr@lfCs3{l%g%P zUGX1heuN;mv0CdA7myzVz)Mgw6#pZ)>#IAkuGH4{B)hY>R`^xS*IPY##A|9_bKiqj z;k8VY_$K_ruR-fhB-@W7cR{y<^r<`uH5RuA?0qcP0zZ%bQjERFxK2pdi^32>+LaY{ zXG^mgQV%ECTFKPes}0=O$~$yI$eL!nHb2T3lgOEqOQTk`x^lStQ+6r@4!Y?(>``>x zOg8e00^Mo_)r&Gv1QNM{P2g|UJKATFWJ2ca?f@(i(IVXx82E|@HKo4q}S&JTFN6`#yfkYZewH zWWs!dc=BT#=xuSf%v(%rr~R?A(3x*IuoO>|8{dEQc6`J8Y91*e%-Vv0m}BOmg@CBd z0D`)Br?P-3bT(Uox`5oP_^4x2{r=nU_cv_Xb$6Yv4QF}#6YJKi;dkNh#-j7F$^pDE zE6f?x_PeAjo!<5@z}G5v%3xLUFRO0kjAFzW+4`4AlcCPXzx-(adV(%P<~~N0Q$d*a zjRs%aHs*1ZV0q4+y(OfdkYKqB8~n!;V3lQgitoS4*Hb=}*f!IE1%0$;M@Gx-+l9AI zX{bCIi1pvv^5`^wTmF=_*N4nmwnAGAy7VgDmZk$=4-BqVCH_PaB~Awd;Op6^{!^v; zIr06M$WoFN-1Sgett6a8srz-)O7@D4Fe1@WxZc(-)@zgx@jJT#@DY0Uhu-We?W|gT=nNJ!qeuT;zgQz z7;rl7J7ou6j4)=5o8B&@Cub(YMH2u2|6w=-u+Da1*@c0VU(=Ncz(b~fJNb%7A2= z!FOmjH+oLGOd`L9*T~h8mQB0zGEWcKP$XBa;%vph=WC&B(iM>P*cs;hlV?ehpz{@h 
zGcAwaVQ)pI`9su(B>)$;t&J|7!AZbZdbYJ$8~u{AgT7sSg4o zN1KqVzb^MdVp8>mO@ZY*w7cpD_ z2A?4Dco+B{sLkfD6*bO-AWh}ao_y4xJ#pFnVH|4CNo5VUy!uvd{Y`a~7z3)~9>t)v zDgd+huiR*W3su47RH*Et-m810dx^S5b~cNpxInZQdU3A-7`( zStmm*`_88^MxjB)%3fZ{uAd+!@~AKwbz%fP2Gn1koKCH~0(<_(ijp}JJ?FIym9Y98 zC(hX{3uP;HYH1bcGMgzVP+#O*T4Kj?v_;R)(-|GDM}cPPD|jdO)HKh50s}1aCop=c zN)iLJ@_!wESKxdh%W}Z^*z-t;BjH>S+jnW(#xtpDY^P!L@rVMK0r_=yKU|IMm76zP zbycyk`+WIH)<)u!_hS7zP(o#13tsL3l>Hh-Gz&PC21dv$fQYpyw;bp{}mffi0KVt3v7lcHCIx6zZ5olTi6BS3p z?5ZJh0pxTq;>MQ?l7wWQ!~e z6u4ajcSnW$VrIuR(>Scqb+$&J|cidqJXz2 zDw4;k=@VskJWOe_r+T^4HcHp{XM!|5Iiu!Mzb8CVPfAE>fSl0AI z2MyH9pe01XEG%EHE&7bzr> zOQ64<4bA9NC-1gd1%0m&XWTwKv@hR;-OHg-X%IoHMbFQXdi&Z{fzu9zgLpd0aIWK| zyRm{f&)vjr#LT5s#aSCp$%&K}aVOSZr8eJh`I(o(ec9F$W%o+h6&rQ;SyoH5aM3jw z%{lT^G|01fA&nA(Clz)7ovY{6k?H0bFcsPUEpPJ6S3p_*KetqOuGl!u+JkOn8WF^B zxWx_^;EdN&krKHrL5V$rO!*&_sE_pb~C4%20f<9@|qW0^I8*de}xE-Z`3z;>u);zXrIjH z;CM+%$;|iYp`=|2*)LN0sB{2#2?YwM?bZEQ@!;9+RXL1`Yf<`|W}-*S2T^CM($M7- z=YdMZG_(H`Ey_WADM5}anE@ul*?6{vy@CPvYLgJs;QQ&z8KYT(gWpm)K3imQ7~1op zApJpno}9!(5oYp9@d1l73XDAYkcPzMrjrKlgE=sDF-n-7=D}YGj2J+~l?W-m9pNX5 zA7qn)X+DxVm;L}jC41Z~zKu8HfM8uJ?##(G|gV5p=iBE&uG587as1$)y6_wVY2P*+Hf=e ztz3m(?`OdlAKMm-HQSaT=gVZo2gW?WlV`ACv4{KRJuo$BTI<#UpQ;?U08C-^D%Smt z>C3G#m95@TiQNME_T=%PIO`<06D>L`03axYIr7zS>F4XxCSo!fO=%8|`J>!LZ~&o? 
ze7KIK3W=+2cudxOXdk$iHlH3mrfq{aMr;A!xxnR4%>J9G9Pm8MA=LY06O_ zppTv=HAVU<*2%3OP-zQpMPsORQBbydIIdr+3ezO+M|#5aVzT(}3gJ5(6oEXOHr+K`i0 z8_g{QPlg7UuI`g32-x|PTkd-8!u3BnJq_H>vyt#L3L&1oQ80`(nKVT^ibu&Ez8HzZ z*&dx|9Bj(%Yhk5k^;oRDMsZNS7XQBODUd*4X$=pD|G5=0Xos5wYAo=XJU^$6?Cxpm z8U$Dc6fsBEjJ_Kp4sqY(86CfJg?cR)@kpH!P!Tj(>Zup>d?p>H6%S8en?ne4!5nhZ zWgJ|1;0r}PqE???%X?z~eliXZ3uw6WUqc7qITcF&E@)096Tib0uto_uds3#$A%nup z*Es*J8;X^1Cx)B~=I`{NvV-;SMviVL2Z1z@zoX#uLsEggfy*5H@vu#EKf{QID5ZWe z%Q|jfrIUtcH03TLE@$KR5gJJTj1=u1ysl^9GW4@}WIE!86Y5O;05|5u9YfOlWrM{f z5|}@z&`iH<(flPC)>9XrukKn|wA$H+%Dua`O#)7aEN_465XdIE-PZQq!T46%EDM;8 zDH-}8r&1BJio$8qPFi05y+`~fIST>EJLZ9G?3?Y?_H#9Lt<>8ULId>KAH`g27&=Jb zoK>Mo_sAP?7~cQ2KB1cWpT4%eZ{>3gULP(5W9$#7m&?RM7kQgB74sNhjsJUDxcoF{ z+3-8XZXe2=ZgH?gtsMm5>8Wlzt-`5SB{Eiy~i{ii} zbEsBEkv&t*s)%V}Ge(<4r^OEphDYjniS3`FDp<}`aniThvM^r%T&2BZ!J9FF*@=$= zLHPfcl>_{$v|Eoc;c?El+bv-t6>LXNLc7mhmx;dDauEFIvWX&kgp+_hDm9K5+k`x* z6S}4ImF;O2S?30p^@VztXo|?k-oCM}nTk-zQny>uQCaHqX;#avEOdVZyj_TB_lKS> z+XqCxmwf{Oa!>`h5I~N|-1cDBK>tFEi}~@M_m}fg^O40$%E<*pnVZHB>1`XP0&c8a zP!9WnF?cP;tY;I%ItlHzw>RBJcE=4!!3C=1`KE&dtp;LUN4|5a&SXd^A>y#iiBw%> zv7ksT+ZLsnZ<7zC(?F;6^E$Clf%nZgO&*9;P^Ig&smB0Mv|5WemQ{vVqcDSlG?Ixq z*NY#Op|*(IC?|(HCr7wSPJ?w$#?JinRk6QiZ+$ly*J@v3cX&*nLu}M+I|zmjSQqai ziD{+SD!SjT1UmWaj+>0Xq32j@8A<- zlQzyj@b^6?Usgzp8Rp$)cf=>o3a^&L>vt9rH4@MwwSIs3o+*R_u?_05sUMwtS8(ns0_DFZ0w6~-|nN4>VMP76jv3%7zMnh z`}EwGP(k2Jh+djjkOM!<*W$ARrP#lhanDU>2Vzb}NH;#x+#}7;wJpwyzIPk0jyYw* zs1NcCQHd>_lU^A%J@)Rl`U!QC$OpD-G~#OUmF-sUMK*isCYe}tb^!Rs0Eg`GF%4i=pR7zXyPz zF2T@6+lt!_%=&ZvAARYJi-by#h>TnYqP*IX8-}U$J|Z zx_Y}7tAmwZ>uc=oo=m3^&RGZvi%D#hd`GzGKHvGDdUq>Q=NfOOu7iVYkBmDe|8v;J zROJhOIxy!CarIh9qbR*%j&y5Q=FGPaaCv2x`VQ_OI1+l6pJ90R%kfdaH6^uScB2h$ z>SrC+^9qhzYeyg|PUg{A#q0mqh8a4hY=Rebv`flzix#{=YuEuuWpFsu^J7YaZbDQoORa|Lg zRt1}6eRD1eG~JurT)64k*p?+5`CkB*IDHUD&{m-D&~6p->^oexM0ERk=rd`5J@UEe zy6)(dcdldSx_n>|b?R7&C_ihPjkw15 zz^s)gDIF*^g3ZE_ zQU#Maibr+(->C-5OB5->3a?Kf{y$kB%<33Y>)_`dfwp-nZ<+TbKQqNYk1~5&+4YupH|Aqa02^e^}VH+>Chlp3Kad<9NSRGVUx0FBu@WA 
zE{Kl<4q593AwhupICkaz2kcYrp0xPDI0OErpSvx-c3!#I5dLT|BIW`%v+$Mnk+8R( zArY_ak4t;Yy1@Y&K~zugw+J2gB@w@QC+$iEoTY|h!K7_`1{imKY};N%bzex}9rVYH z(OA3Mb#VpaXRLOO@h^s9s{4Gr=3hJtw5K~)`TrQ#7w<=f?ESQktQW(C2$bbrY3|6i8Cx~F}NlP9>EihA1o2%`>7QizM<8gM&NJtXIU&b`Kv2PoG zB20#82izuk{;+=-Av=ItrTlk^lYY`~gqx1~ZN>QGV5gN<Vz_+6C`G;1@lFI7RG$2A(JV#p36Jy%L%04m^)AMa zm({VpO?`-Pt0oAgg8Ui;vga~~Uc~i;-DJxk_p0vTdhPv#!c#-tI@*!4uAqM|ADZHS zhB<}scLd9MC8CgQe|Aqf9r`}1U(f^z7i)-{ltkpGgS@)o3pM zdGjwecCQ&@F{G94>(6>~RJwwgwq`o{9&4oRS*H+HMRO>PbUQjOY*qJfxpk3MyUxh{ zyT7IP=k_LXcMC$f6zX2-JyUr}7v@hRB{6@bWBAJv-V?JL&J*aduIq3Amh|Y%xa)Q5 zvwxk*Ie*D*z2TQb=TCTs1kvpQN#7Nx-|crM*>k1;``3Z;0e!b4Q{cG|hn{sw@3K*xFWOg>ip?O^e1 zY$van($VIqFw;=}u()hbfBnKM_RW-PRo%a`OQmG#<=y7lUC}A_ShCTU7!|QG%Rjje zfcL&iBb7DS=_v=MmfpGM$2SD1Is6s%_$6o>CATV$;FL;@KEc(vHiP77rX#E!VaDpe z)gHoQu*^XR$@BzFp^aA9Ts)b#Jc?D7JM!NNsa-g(oSBXEH9M;rmt^0;B~DbPJ6#`J z+d@%368vSQja8#xX3|-DvTF|X(CMWy=DDNNW%0|iTa8uSRkH6a9Z3Kf!pm~QfcQ=f z^65@Px#4bx_!22q7GC5VoVYW+7c(J2|CO@9-3F32qVq$k1XNMj`H6$D`l-Q?V**ko z1bv-$pQ%jXWwDl?Zk|Kcqqq)5bJb@<4}W0UDs`@@dF?=d{y#CUtubpf>p=3k&+#NQdvCEGkL1(v-@0ySani1W14}l;Lx$BI3RU@? 
zp6lHSWgjE!lw0mJpU*i=v%f`sa=iCQD_*d#c*_4Us&lJKg;88(fF06SO+vz0HFnFt z+(z|Y{8K>e1868XEXWbS7-@*sNp>B&ZV`BikYI}acMd#vLOqu6XBnKc z%OgX#hE26}6~BZ&bdD9g0y`Zsr+{vs)x09_ZeFhGHZLN|qxfR@^Jy?CF>(YhJAytB z)Zzp5M6hrwm`48=Kbx!>7n_XKJErOpO#l|6}4D!3bH@9l%=RhVTA zH*)D@Heg`sRc@Vd%YW-yu2y*vB#BTaAHC;}+ktn4q;8~{367EMD~-h4-spp>N=@+M zKqf$F1hf+1Sl5&*zxL6Dauv(_&%WvwH+(B2mjVh6@5A!s9|8TT#BwJ zz@&*E`#9(M_M8%zkXSfT(`cuk#EBe?yx*|mqCl_8P>RPE3*8?(V?u$M&&6Xpe`Vg< zB6#}uSzn)6d=f=r&?*b&$R8msoe`kC5l*u4%3@Vd?PteOwKCc8ScnH8I1hXQe6QgV zh9?NF{jl|NCNR?`0v4O~!^92#UkGaS6j?j(Uw^f|%@Tbi|Day?(a|HdlQX-X^;Z2} zJ5Dk@c&y`!d9WhDYgfE14Y@1#VU~dgTXB{LuTN@|{(a5YsiGqd6k!Lf;a8LzyHw>3 z{M4xi5%GiDH9#s9LmbU0Hnk$wL3Ywt9PPn(%c>9`UPE@=)TtXWiM_yX7ZE)Rw@m~9 zA*?)3!zLD|u$K7&RQY{T{%ed?5w-D@z@U@hA{N&*)oEd!2lWwy2fVkt>F zy?qYfWJ?U1DHBp)s)+5=oS5QP#5HkCk3A>IF$*)`BZiC@H(-x3lieVR|E%ASoHX@} zTg>d--qRgV)Q~cT<~@iCkjOz|@$tHpUl-`h*YO)5I9(Z3JoPHC5X}>lUCgrbMXV7>P5a$XY8pJ%SVbdLv^kZBl68)B~iqV5bF|< zPEBPL(K@E~4c=qy^bpr{=Ie|snP47+C)I59Hn7!#e-gY_H?36T<)|S4NaP6?nzby& z_}-KPt6Pzx6eL!koXsm_%lt2NE53?r>25{564oV|m%U-pYq9;g5xH)p$Tc)%YgM6! 
zen2>&ADBLlb}b_&3NIHMRf+OV5EW%X;4nmA_(5`0DqHb1W@FYiAi!`>~ zS~8s2-h~q?BJn%q7sYvO)#tuwzHTRKmz_4gEFw#WZg=DfN%DtswQsP>96YAINgP@* zh8M3^@um{|6|cej_|>&jy^B;9qA2pVIS>@zz6yl&v9NSf%vq`E7@Tr3&^m;10!XrU zxDXmFM%p8MZJbtM%5UODct>ngtg(+~4cPb&0pWqRi^+9YJpGl#9yfhY0`qkFt8*!k z^RVH<^)1WbwZQPn;%Ijl!>k*h-!2QbwqoSbNOK5xG-R;cEWJqI_j07)i?B53DF2nU zwRfW90vYLBf=x8Cg>}PgV~Tw;dn)#ZROYw7WN$H@yt%9}P53>A`AQ)1ql!QDAJuyk zys_UbwUe?WMFjt?I|Fq8s1tJ+Z&cM_CkjUes?yIC>7P@K9z0y+|69Y+888yNj4%fE zlB_K)0l-XlCV?ZMBIg6Pc>HGtX9=ke;fh6M@uiTWM)~^>seeKXIxnOvE_0N<6pZ8c zIpQ&ZD)ip<8^n4e>*dI%lV4Z-SyhSSp>d-#V6IGpCcTaJKea(fyahm{BTH z?-aT$U9Dbhopw)I`4iPzpMs&#EI0(ocjLtH<96n}KlA+yePW?ZH0fzYSy|*kA3@xb zrR5HXt|%U4RI;AeeRH75>7j!vFwTu1L}l!UCop8>C!B50sU~vPJ_sz%Oxp-~C_sMl zL$BQ&D?C9ZWO1Lz?OWYfIq0Z%930A%_S36EVVAWwI`8zmkU3Ge&Vwe-bJ^A0^o)99 zYE1PaaCc>Yex`K|heQGy-w&J%=R64=Wbj6x=f8E zz$5n>&?JOV zkD@S?gbS{9?eAVrPQUQO8U4E~RwYyb8)&$+Bh6%uvmm}3*c4DaKHUvI*$_Sylp0Gu zOWBEion7Nzp9>CE=IX1hIpK5=Ufi@lAIKo$ht|>}qqkLQ;*57oEGc);FR_EfN`RsK zZu40%>xagm^YcRGIE&Bk9Q{MANJ;JRZZ;5!mYBQV;~m|FCzb zl95>^$<=JP;}gl?Dc?yaG7+?aQJjbGYMJgOYQUjLHJX%h)2>+F^x7rgWrB5f9BF0v z)?KNM7WyounS-z9gSloLL|Oee%AW1%C7t)r-GjS(kSqp0^N0iLDJ!Qw^jeGrd9^kt z5u`wy3agrMx6*9DShN7ll`VFn$Sh7Y-Ux~MYZ5rbwE%@{yrg;uUq@L?T)FLC(7FVN z$y&$0N&QK1X{Y}$l_4v-J>xj=uFaRFd7gEEYsb3>I-cAHB*xq)(SqZyTA9)^TLG5{ zIU}_{io>3-e)goB`!h6qbkO%MeBOY#5KB0;2Zipg^J#+9e`n=H0&-Qpr~&< z{mELj3~NcatHWU%lF{OtT6iZ1+kyOFQ@4Jz>5YMjDW^!~NHf-mt({{dk@%8NJ|P({ zHL$TRwbL9?&&ctymu0_6vBQSo{E*n#ch~pmc?l=pUIaMaXZGL-{oc`*gs~B*rrmU= zIWD%tCN3rv?<`C*ONZ#_t{g&-W_~zGCy9xDCFF|3w2nRUeL988Ig~cb=`a(S`MRXz z#F7RwO2Co-w=VC_ySQa(aPwf~ME6c}_dVNIml60USJz1*#4M38ri1rkx?j_LQNynv z^`E}hcp09_$_pZ6VP`zjI7&4Q%Jmr5dA~A^Q(VhG-LOxgDiQ7qau}xeB>y^M@EX^@ z5)$bbCR+?!5TfhOtaYjpUnap*#dJV=V>Gzq(;1P6?r-(XI*tc=d zHGWdfh7TzWe<^+nE&%B*oCOMPyu_0{WE!k@0SK$|g;z&RZwPLDNVZfkvV)Q$f&m$E0sN3Ny$3ZUIyW!R zZ#1sH64D6Oa(WUf$dV;^UC>CYnmEoyBVyc%#~mpN3GzFXQdJ(9b*vppRp1Ng{GbI9 z_cX{KSCBTZxYQIlDUV~wy4flnQ{FN7dDRdKkRG-sEAq>42gmrZk}q6)T8#e|{qzdR zkg4@n<>p1(*DO;;RZPvemZ)d^{)c~LxqNRI^4y3R 
z0+;LRNKCMW*?^5L4h44*BXNcz=vk{cP~d$l8owjrHb%cslx%ddaado08(oG_l8rcu z>1x=gh&W1Xz;5d#Os(X6U)Ugzv)FRnD8cqvSJbeE#nO$4`#zDOu`co$nEFjq1o4fK zj5>?Erx=6QMb3D0p+XODMd3lLovD#@-DLL2uXDg-RTeFFb`A!=1&y7^zKz~1|FbS2 z4X6!z$8^(tt;@i?mP*!~Mw;Efo8~q5U^RL(SAfd%upoGFeu_h(NWJuEGP;hErAK^o73{;Ts{#bXr zp$egkGDsx}%+W6UF&!F4m591<;r@{$gEBBGIRcAIan`$l@SK#?@&HJ%li<%MB$+52 z2(4qGOo?G(8oN79!0gWwMYw>N_OM6=j#XhUyDA=?3Vj<0>aD~oXPVyxJ-tL6w^M(1 zpg}Q>xz<3GooK89D_Vv^rmZwXynUOPRb^GF=mCEnJjn8i4J-MiiWLi*rGhS{k|=WK z@eFV%r_1{j;}#>W@2w(D|AbM%2+-#BHqr`Al;+w=x=S}|1`&0-x3?3P$~{H#;A2GL z<5m7-`9nnnSsQ3`HskDg4<`d#cZgFfBcP6|Nw2;j-G{3(g7RFbm!eX@&2rC-f)eWt z&?~aV!xZF@^mdja6U?rPDv*FvZhemD{%n7+C~9>De1rO2VUkw`nvJxC+=%wiFBzq& zmW#3E%z?-?E8o|VL4c^vi5;$dyBPQ@AD{p>!c8W#;{rLrwjmJJLGle*&BV)`V*CbI zyO-smJar?r?`ALwgwye?S!R#e%CY7y-+bqD$njygMaJICU<&HBlUX|r3eVn~B^j(L zJlkFHH>%zQX1Ut`s+jDmdUrFpYsd}|@?A`?JapQMmO&YbUz?;TC zGgK7@q{=;?{1v%xV-w6^aPQ(9{D@q8<`(zFwwA3sQzHCIl|iZ(AljrN2GtJ@@){(tTL1nr8M1`jqdkH-ClCmCKKV&Yc+H2bHF5xy&n4)7Tdngf`* z)_4^$sa?HuIHRI@%~LyzWngrUwx1}w#F4s>B9llq;A}ww8mq)T7&xY*73Mgm=g6`mJfRn9kt)V}&ahB1WLY ztns%Bks)a{;R44+i*!IwQZ|T~$2fzhAI@A(#@BM5d~2v8);Ch+x_mY{r>KS7iI=y) zoo%iB`q%zod)2;=;u~1`W+g$A4FNFUlyX^@yH9p@!v26qjJx`&Ld8DyEUdO2xSw`76LlDkrck+m z!{fM7dzX<`BYv2m4@$!~DP}<6^qi{1JmXuHSRl%#sRtN_eW$CbjupsGJu40v;c=7@ z*j^f5`5NTYV#fleY#IYl9f)qqiUoTGyprGQhz+?2$q4EX8KM~;IRB4xx$VBt=mhH$ zx$#ilLUhlyoReR%rd&v&4ej)tyC0QV75Q%JviSCb;<(I-N`R;YBZzgS6u@9@EM6d; zEW*g+^@;{b{?Wk~)`{r49g}v*r>M@i-~*RXr1Gk6sVr&@y;W@gZb3J;2C%FNii3*- z^XZ=B%2^ErD5t~9m4(rI~S|q~q zJHqz>FB6%N7Js7`slQ^hzKaQ}i0emoQD^f^ajJnyn)rFHum@8$@rM;aT)BDAWd2ym zCIPir?@^0*Pr>C$Q%XXuli^4|Na#B@F76@cTq2K2?+q-s;8y3zlb-nYpk&A;NCpsz zfHn2UaoR*0iK$Jjqt$7-%C3SsD8zs=4n9BFzBYB`;<#E~pmq@WBYm^L8yV|$k=xP! zZ~FmyfzIHMkby;6A9{pxKH=QJQY#tUx?5(kB&sQ82PjOnYYPYNi!(weL&`a1 z4C(2-6yQ3;i9Zl90G27zDwxSNdh*rFs<FrLmoFx-ZH-jFZ+ix)jIMu<|nfvRG#9|;oMmV ztZHhdPe4={8OaLt5#X^EQJg1;MquCUiQsS8pXVJl*S0BY&NY6$ZNOQ6AnI9O?a^k? 
z)w4txjAVnrck97@7a+k;3V4DrC)SR)G1T_$QM{E?(m#>58O)|&RmADuGF>SX_0&X5 z>0CoTn;?$^m!&b%=loZzl?&}=aBW?g-uE2J zC*u;#ACwy`AC$Q~F^N5}GUfOTC-TMzfMcr8ED{o^g7*j4(~FXBhC9ZKb3sOT5zp@o z|Kv<%S*f_JGbt8iv)R_=<0>WtqT;ec{DVZ+wio()?2$g|qL2dCP_G^GM^0rKfGALs zO`*??1OH*mjQ}Wex1rn};a)jY`k=rllrDo8kWVh_cq28ko0q%QJxQNLD~0tr`wUG` z+LtG^Rpk;!7#M`S*HHfxUtJJM8ogk!L+wNQq-T$HhHSYX6%U5XplIAFAb47qK8>|< zFBMxew?24tq(roqVpOma^KyFF#)sf7v4sa~M9X4CXlWC+fH@;}Z*Sd8rN#E=9xhS< z79i-N%#wo_3tHKT4G0o|Sl8}a1+D+Usp#;S4_QN(foC!D+hD%ylYeTZRD`s+Q{pD* zgm@S5D&ytPD&%|km&()jI%hFsjk-Mlee92*fd)E|8b^nDH|^^j-o_1(z}uQ|8ycvc z)N@PkR)|WAVz%>VI-7MN{z9@Jt3)0FN;{C2h2>-`%AfhyWh%8H&y<$@i0NDfYUr_CC+k&XBpnpH9^|W}FUdvNn_XG()%2MVIS{%{ddGMZ_IHj@ku%{cqjUA&QS% zi?O4)MHcJhZCdx=x~@E^-8`gszw9J=vF008GX=i^ul;b|45!o9(d-~{0vvqmc#&_| z*(lm@GS*l>e#aj+c&pQ_EUtFgd$K&v5AlY#PWbQYTG)LquRAnUkz><@n{rU48-h4C zfav2>VuSF?3f-P#Qd7tJKEh-;A`0#YhviYCl zJ-pi#3hFw=g&2tOqku`}S>^xwEeW|X?(br54HMKkmlXBT%~J{JaM6vdB7kPLoU zOpKT>eZ@`@6Yldfldo&Ky9TuG|0>kL=^{2u24PP6iURN?A|ruX{GfklOcU$L$d!TR zCD%WR&HSC=o;Fr-3H}66Ff{gIBSjRkgCW7jU4&YN#9--xZ?4*2qQw<7t`RuCNbzvik>2t_BPi1nd} zf4FeUv0WAZx|u-S z*KA%E`vzqNaa}e}1^`wQ-1r&e`asI$#DT>x13qX;%0FBa`-f~voe(>o`%00^BjB#6 zVmwwAmaJb;x-hpY`b;=m%Re-mGl?3Ny5Kdj!ZzXU{4KaL)8PC%4TXD>|)8IKg{`$nO-`-d(~p}v6FB!3hUag%XN znZ^-bAMaMTrn8})9^DA3hi1~Ebdi{VlPCt3YbWh^h))15Zu5Q4tU^ zP=+u=stgeWggHWhU;>1YgaCn%>70Js`+R@%m~6AxUc|_fwv6x} zAx9k8Z1JGXS~^u#wP4$-0$)o#zy@_p{A-V2ArVlz!QK?j9x20|>mvxpR-Fwwhj>#*b3UQ^Rr8n=)!nK8@ErC#B^g@3azZ4G)NW#Xy_L+#g=w7wpk*COgHS$q; zqoarY7HkB25=YRSb02cF4gp=awgS|Q&Le@crE(B#UHxO3id8nY+8W@bKP;NOE$|++ zay*K^WrMj5VYHVOPC-YN?gg_zK3-TBm+4Z~r#-qD(t%z2b|~7f8+w-jDFVqwbJWoU z?6#&oFXHAV9N_!^uGy3n`m#0Nb~I=)q?LF~_P+O~B|1_t5)il9bi(<`VZMTQRYw2?T-@7!xtrqlve8p6By;6wSl|(3kGd4$vHdfh8^Si zWWDRq!u=7}C-^_36LFNbtSpk@uywRG{bL&DMAINtZDS2~FrNp`541QkUtg5F^2w=g ziw+%ucFK0Xd+r1FmZVFI>{Ms7IZ7>7q@M}DyIdZKH*hMH;VO8c4MsD!!OsLv*JUk% zM{G3S8jtIv7yp4i-tNdZS}4q~pvA=sI+_c#x^(bc*}ewGNxcmUK(LwWH2Sd#)HGzL zU*pcXRxY4XsX 
z{;R{P2&~A#_Z9VqS?wLMERRx-;#P2B{i>9I*Q^1qL|b2W!E=pR*WNM2rHP)g^Q*nG zh{$xPzl9l(Byz46oOJ^AG-WCtW#-`CF|x@>V=&K#o|3H*lU3mZN4DwU4YPTx5l>0; z&R_l{(Bc| zBPcl{-ytv(amY{Eu{1;>hzGw75(^pHwYFY^j-6W*T&Gk)U71ctv<_DjP@~3l) z(p@g7v|%S$(zSx^#l9c@E9=&{6_TY`Y)6YuL&Smdn^XbyQh2zuIA?oU*_*wA{z;>E zsG_Z;?N0SlQBUMKU~6|7**cve@zWik$>iEFLwZTAjA~+w0UcrqlmQ&A9kn_QRQ#A# z`0XBcv8tD;({0dg4#k>6;7PJoi%3vQzw}wnrBRgE1<)(_HyK;iz5jCkpPdTI61b4+ zN*4v$LQHM7u;9|KAK)JR>CdKwKUmm2>^zAbp&*}T6n?$01j|;VjoZ5F>l2>xMvN$T z>bPL;#&k*{On zwN&|NwIev4w|U=GogCOZ`N4zo>1O0@P*IVE#Ch5mjW4=#U)2-S^=rJiFeY9fSqcX#41 z$CN7Yy2socd8S`sa3btO(VvK*ZEU41P%|a^l7g z+RQuCzA@JsnC(fA!3vGyW*S6?8rwqmzO1QB3nHU|&MbuVMD#Su>Br{wveFwSu{BL~ z=|_@e<4Bf}@yx=6cjz?r;%fO|+0AeQDa9l@_^!9@m}`2RaP|jAP@J!gYu-gPXHyG9 zz{LjQU(xY-jT7&dc5HC$#Ddq+%3z#QUns0RtuN_3Of~%@ki;dA0YfJDIN50z&lx!O zamrWlmo^>ae^{D0;!<~A%Wyzq!{j`-Y1B z3^5%`dIT2MVbv=;1jWSwj_tAIcJVOt0w>cu$kQ#*d*-yOX%f1y8V{FVn(oSUHCDK7 zi+A?HiYx5}3*ZtA=tk`*Z@;#=8IB+L2$m-WKJMKRHeIh;?YN4tWxIsrFf2!O(X=-*CS=e7C!?Ci^=&@L{IfUy1UEMB-c%x?{lG?|r?T z<|%6#+38H>X!qEi`zz_g!+PS5jx_Aa4B)In>(k;zwq>rPjPhx&U08z0(;M5YgTJ=_ zzh5RJ#8SVDzQntX_i9>|WA>8vJ$NTK6G%4cqlw9osAg*t*pajxOZM2@#~IZB^qe$xK3xw2wuT*D6he99{@Xgz^WhssvQ6UtXSb=SJrLX zV6$34=1~|+06(N=e;5mGa0ocyV;K`uX?XuRNL`_UV1Rev)RCZS*rn^fmPZS_bX^wo zwLg5_-}P&ch%ci6Pf?j$tZj)Ad>1(v|AP3nUacQ+Fb7*{rW419nrEKli@Dtqin9Ia zAEZu>1_A*d9c&M#N@$gENOLoF^s2n6-kd{sPx}SQM2}IIg+UY*#Q9D{<#!$a)}nLgy$^nIJCA>S|r` z#5~`ylx^zo2L${Xi(-I>PQKs7@NLT3JEiy8s1j{(=A$~LWT}pO;=P;u;69*CafX<_ zJy0~$kdBW-*_#R!i3Nm^5@MwobqcEkJT1V;`3u#|m5pl7Cs@ zP|B$2euY&*2ZT@smq4gsMxSwtBS} zmUc*Q?v(*CL@_W}Y+egU8Taejau-EVwWH^u-f^E%CNNQn%5iq@icY!Cm|r&+O@aj) zDQ|Hf+AH#&|KLP`#eFa03%j*vo1BgMrfTKJqjD)uOk!G$z*RHdeeFZjGm#2DG_Zu( zF6gfa>Fw|MVN>_9@p9)rU0U0($cwR{axMhKU}eL#<(ziEd8=Sgd-<7MDYdEL@G3T# zsZAi(T6i|K$Pj7?iT>N7GCx#s*RtF^97y#MU$_TY2|beqD5*>WI+jaJNdAp1-M)7x zcWDS^lrL>g+68ayE1bxEeXJj33Rw%IV;kI@VD>rhtG)f=)iXsE78Hw&p!($Di7`K< z6xf<_K7viS=o(FX*Jm*rK8bh0-*0+feJ&y*5uoUKmJ$cJ4Y-B2a-)*DoGSkSV&S}5 
z#%}Es$GV%e`CWq>04C3_5ELWp4)LbXZD&&EMGV!Yl9M>O0=?#*X61fCb6|tO$GLA` zxMcH}!Y*QcK+NU%C0NQAvsTPHX_Y(qwNfyD4hR)nat zJpa?6tRTZZlpf&BrkabT`{thHN?=h<3`4#}_;gh~XrNA<+`DwcSa)&eCB;_%;+D9o zpZZ(4it=jzK;naU97>SnvvYFzq+mfx3jAZj@hJEC6Hnk!X-T9C#&M<+R+j2=#=lWH z2U5Z}b&`e@#khb^aILE8Oqu<>W z4$*isAu&R99!FfKJz`2bz6sAneO-mbf=)>L2js{}a>nM`Hy2aD(_S;T+#EdP^k)_8 z$4(0^fcwehYSJY@NGo4UC-7P?zjiUTwNDB+tx(|_$r5=hPXgQrQ_0C6t=yxPX~+lP zYkvOoDSMzOLSdy`&*2oGs(hAm<128&Ea#9suxvY80#D1)w@9=7;D`P0wF3Cjeu{O0 zU7p|D8qO9%Rfd~kc2nK+f7kqyi{`niob3K-zg7BT?!sPkW?fy?(kZABc+$IVyxDsF zAT$a1*B-e&9Ecpi50sS|dn^#-rkhd8%<#kn7HmOm5f}@N68Wk{W~5P#$~4|W{(R|^ z#;MIhY6oyMD3Vze*?#IWn|OGsen4slW^;R-9e&B}^3&+HgvfsT%(>4`fd&=q(_wBe zM^xO+rd6W0B;tNHi6=C^*T?NvH2=FMICC{i?qd2#rI06ZL$Hf{X!g5E=HMVaa(KWp z_fy*~l6w0*Je|gR9U5xM@iJ5kWo;t3j<&-M-LB_3Mjng!ZhA>AK zGhpW5ic;N$;CD-Q#)L0Jmt0%;nr&1ie(95FyzN(1wM*YLn^1waC&kTB5SoH!|y;55gRf~x}D-{u>q z>tZD}t7qo&ziXlQOINTp0B3qWI*XKB)2-DTLO=F5q+O{&hCoWv`fPfJ>^PTkePU~$ zrF6IldzZIq-egHDv-P6WYwN+0ajK*yP3$LbdDw*&MyqWGe^`EU0Zv)=G5OQutNs-^ z0nJo+wF|XqqDW2Ao&>^92kf@Z|2#KjEuRv0;H~&F$)V2uMpAo-Kr*|vKpVI&8whu)O`?b)+Zbc$7QA(ZF2BDBO z!;@7=O7YMbYs2s-YLZ)B9v$ao9%69yY7`v;5U%H7VUGrY?}PQ~)kW%N(m&Yhz@L;tI$M-H@33S_w&LL#peAthipj43hR zcO*3#Jk|_$kLCtv)sYD7XC*(b-DV-Zs_`<)5~2lqHDh2BcM{ZIERLjH8N-m89YRAC zyw*BKY;iYGPI(>R#|41rfOl1uohBp+q+V>AFB$!+RCCPHf#;`$&B~HwHE!iVYNZ=} ztZ;^Ab_J8hIj9*C;)m&aoIG?5fhnrT29LxSdzIZuCbYID^Q@#!N2&@F8ncQO@~9{O zMlnN4b*n|xLm7r-0Kk>v#{Kg1IZva@t9JoDdAK$)o$$R7d=-H(<-!zC4Y}HHgx*!6yTW5F-HF{+yy1tVP|7` zv#~jF8KrPCdhoZh(ElzhP2?O>^ij0UTFgE8)3-yHJ@XDlgIyO_6NYHbj1f(=?6!9C z;@;(K+SLp@il^tERcG+0vVoJw zJ3-+UpZMS{NUt@$pYiq2U9EjB(5#uaD|m1GK~}V_b}!>j3?nFY!UqHuI^6bdYrLYb zBYxO2z6zwtd_OT5H(P#%_PqOGM#N8`R@CH)Jn%K-ENS$S@RQlkt1?`4tC^_9C~h8B ze*~=F7V>lLi4?btL+%ca!1-Rq`|7qg9a$$IkW1{<7immz+_%cN!E(PFJbltuZZ|NY zUG8M5V9Eofgn*3Bp=Z4|Zoln*xm#dBJieTm2#5ORk|ZQ0AAVnk@m;{YCFuE%ye-jU7Xp?K z$MQxS{A19Apj(^^aJ`WcxxxXl)dwkhRn>sJ1n#n4i@JOlGz-jp@~QKI3c0N|T1haS z2x#KX#!w>1PQktCh%^`;um*}UR3$jr`~SjW2FrV`73+qV8xw7T02Z<^aeaz1?|(E} 
zlN&KSC#Np6WPc-7=uyTJaDVsVby1^7c3P6Kv@^@?R~V{S9hb^hJTomxTa^c7o$i?A zK>hSY2XWd3PS@h&q2{_l>-FjYjvDJX&seku10Oh?^0D#$-r7F~-0=6JcpKv|?&$)F z$fi*|TG;-S7o-V>dpm&>mj(5dfDAX?gZMk(ILF6_SY{3Lj7U4~vDMjofve)<$*L6q zpi^1%x1cJRe<7-1Ky#%CTe;3T%B_UdOWjyq2ucCN(tA_3dJBHSTDW#bu7SXRI?r0; zdIar%Gfnw4mzY_aT8kXNQss$^njaCJAPG^>+`*MBVDDFR^N#8LeN_n+s^GeuCAK@* zx1!5HCt{rH|E~oeb~YFrk56@s{*)LEG7^wV2Amc2)S`LsC0~%91(I45oWwugz|pN+ zX?kEws`-E*gQNM$Md9EN5>g@fF@qR`X4#jOJUUP&5aaNzgr1M7Ms(8s;t3&q1p)c>;z?h4@trZjffrL2xpr6;|Icl6-MNGR``Ep-$Q)8{P$<07-1APwXJ zSP2Lv&F{nz{nU+y0DrjQuF{_}4Bm?itqRm({sx3h`f%GvN z5a(^7(HtLe{F5tbCmYOuBm<3*z?U7DE;i%DQ>342CA@Ge5tctpF}(%TGN3Q#s397# z$URq%!oD=k0g;%_-^=_bfoxyA;TIqQ$mXehW6MKKFSmkC;HFIfhPE}QjpBY&z3nlC=xqq zpCual2+%;ZHF(gWA?W1-Zy+dt4mGpToDrVy|BwU#C-K0FMXt^cY}yj3J7H&dwzGws zIYgXso#ZVig$zh}azvf58mm1>z3VNIx9pxv+gb1IPtw&mJH=_@od7@kB$7~zq{G*| z!Q-fvQY{bHVz1{SnAU{Af{4#JK1g^cgX_h`@6DyFAo zk&CPZSV4LT^Fs@P{ePUJJLhW5Nz5sq)=14Mr9kS%ycgC%F654#at6i^QI~{p5mmKr z&H&_RE1s%#;+=z|RILifL7H{~i3l;eT`7D!v9+qAie5=qHVV?tvRO;OcTg0G@r=EZ zC=@ZE%~`?2cm5uCw8pfN@7)V8s7*mI=No_oGml#1OIGGb1d%@@=gwcLTV0WC_-cSv z|2S8`OEb-x*nfx}Q2t2QCl?jq*mvfYkiZA^0w%vW03w zP`(xDDB(T;cZLqvs8h5F{H0;G#1(an0|NDM<;v;iDu23(G2s?v1%X?)?Qnf8N<^XQ zAb}zEcg8|yTvA++-{QY((rt8gq^@T*dL;_l*@+$j`bQQV#`%(}n9qrUy}FZBi-wbj z$%cDt?~$yu{~g{NzN`s1KO!Kv>K|U19gblvTF^~Df?do=vOLY@tQNl_(_j_B{h0u7 zy$e99SAf@srYLmI34_p|6!lCs7M4bo5Kq(ko8S(TK62j%Q`O1Gq(Ym`lXYmv3|}p4 z0<3yw3=`Rv!5q{PUI;H%^aCBZtpn#@7wkA0h**o44n96fqNcH110of%%7Z%xoH+ML zyIrHvhB^N3YD4_PAda0TzM`p0_`76JCj1+`K1gn~!oIIFO%krg`eH_AWQsSrSmokB z%SKg$9LZtYLJIno#-{rnPV1$lIQF|Lu9Lbg6gc&9p*B9i*9UH6X|8BIG}naMQ-^p` z4{MFpLa04cQFyub9ZqRJ$1=4RKo2tZbQ2sY2XMY3^-CyBW0Var9JYcueP1s&Cq)r! 
zPz0uVBEYlw4qxfq<0Vv_xK(l*$I(J#>`ox_AfG7wYjVZz5{!;vZH{B z1uY5`i^|2P7y3zTz_KYb8Ht1!x1K@RARXDNd+yyTfR{qn7pt$4)!6bNQ9G#!xB;cP zsh97&RcJd@-NF*y&|lx?TSP4uG!IeL>($|4d-c~Kk~5|(A^xxXVrWSqMy_L4#pIs( zTiIIb(`nPxE9#pwC~R&6nBXNIBq16@fH{E~;o&$2o zjH>WK^5+Y#*7rGj>9~>2`Cxo}S#$3uMhjd5le!}1<)|Jg(h=%1#X^C=~lK#~oe1s*E!)j>|VgdlZV&~Jsmq!Us| z=1Kw|R-Phl(MBz_l=FEhwG@C>sf%t$Od@C5t)#;XH5D9iWv5%3DnU9|eP|M}rUq@@ zhwz^;qX+DuPbcO!zw8h1M|WLcoqBKgnd;R7vuB`zLukv>;Rr=ExfOW6Z z4HzUI?3w$d!3fhI?`K#~i~5BWXP=}5u(0d6T~l~*Gs{^5PJ9#-V&eMyF z?2FYlre;9j#9f#hkgz}bg@I&qzs{cW<#c=FX0&BQ=Gz~4Y{spYUR|p=ypT=i9@;vfoNDRHCs?_4p1?HuOK~i`4vpIw6TSpZ@(L_z8f$JA+Z#3n>&o51pnqR~5$R#MY zdm0wA)bn21QMo<^KYMQZso~C*>mGT1b>DD7k-`y!P%q0>7-pMb3U4%&=Mgkdz|Vh$ z4&uvNE(6D0EGs~_DGDy{q~Jz;U$S=c8i8+Xr!n#fK)`@HON7~ru z+O<3Uo(ZvgUEQ4s>1SQGvQkQ}KsHc2cmA5}R^mAuI{O>Zqomb;*W66DbM96^62H>h ziig1DIEXU|COe9KmCf_J>fI?-ey;?`%2Sfzkc|!*SL~d3MDV+}De=mOZLa-IK1OeD zOL&Vbc!I)#s!~^J^tGfA$2@nH(5_-)uCM#xN9dkO^5mF{E;BPa;+01PxMk;tn+XCZ zJOmC!AQdK^k%cI2kOZTXc#>h=7{~rXiP_Yje&oY^jwA;3%8EUcB2wv%?@A5XU%vh? 
zu|d@MJfz7@ykBOIWXa2%_A}+BTjP>Z zg>4QXzE?F7piaS2TRjC4>I(xO&1Rh5uMU;5IvE#uE2|lo8dF;J|Ew%3HFX9P1cO#z z3^g-%=7U#w)X39Ic~Nfy+b5YJcP@svx4+4VpSVi7v~N{Bu8HaW{-r!m^?AwAI5jXWWCgW=X;TN> z8E+s{st@xNFFl;~O zvN6L|kw`+6nro|pPdCf_A07@gPPtyg$Z%UnZK;H2mH%sZBxxIW3b*YGp&u@^@%+ZG zGv20II3h@q>Md}m(AbHBGU)7q+MNH!mK9_S&+!4rsO$v9-pQ}kKSQYO z-ZNJ*(n(OjRz1R_L*ZKS1J)auO;+n8>k=kW@3Vh>&P5D^>|9r~P#R(ide1j_-3_5d zRrR4T(T19dk~dj3Ms|mEz7BLdbU$qIT@V=wV2qaS=cGRcqall_vY@4#V0hGkJBJNH zfbxqp+C4FQ9$HxcF=X+TY47+{+DxxZ&oKVs#I-hDaK0hT5;FtG=lrjI?yG;Jc!bi> zkbJOc(ZArAKSn{}2AZ+=+L=$CAW_%W-<69Z?HZ>wzq2+@J|$UXmEWA`F8yXOXOt45RAspI!3A)T|br_&T4 zkPsfM^k))m==BYBFgpN^{GtGw0h-I7gUI^+lA7mm$n*=3*QeiIUN@Tb$agH5hm!8* z6(ehb{2beas`@(O__lJ#Ms&lIw?4LI`P4_If!__j%+XKoqBpluc8boyMW3ezE>iMH z<7hGnO4pX>_Iu>EooT(%sheN%g;Y2)qX}=P6}GoeGX}}Z#hES;-bFyN)#&%*rlTWw z{`)4ETaI%@6sc^Nw zOK*@Q2DikIkPcYv^xosMt2Qf?*2LOhtdhIFxv@nDaW;a;%YM-EPTxQfs$J0+If02@ z$9FDe(OQ~uF?Tq1Q~(%Q_x3goUu{VKxt`H0djoSk%&A}y<$js96+^^EA|iBT$8cZXBL{9 zn>bie9>Mycf`u)Au08pq-837(S{E3_llbkfk3*F$)6-cqOGjqh@x+tEo-2n*rynbx z!2JwSj}k7%R*;oXh{QOJi-OQUPjLQbjv{-;1xmosw>A$5m7xkoh@7^Zh|?W;P};kX zupiCAEYGCwIf-Ky&i%W_#NW0J{J<%-H+&NdhXn8PZ*^+^Tv`kAJk=5Pr5hJM5AyU* zUbKzY+4DaC=lrV?pQCE|C^T>|xBN17`uH|c>n9x4SBAe-Wx-FqqeVA7=Oz}HM**wXCEY=_xc&tU()`QEa=+bmHTxCvYK@n zzz70zQq17`!Km5L@+TGV?=9K9dBpXYal?$vgJFn^N_6d9-0ouR-30iFq1T%al_$ui z{cf1tjaf^?7kmQhvWk3h%|ysWK8w5^#>ME-B>3vnzT=kz=&K|*MpqxmIG5SqFT;;u_E7jzTaYdsdDnC(gK;)(%=R z1e1%hPECkdbbK47nkwJtw)YX6mvn4Vhe$<#guih!VgTETMV?1tMnD+F{k=L1+qbqX z&d^q8hN4dp4tB~-2d!HAY`JzuOwS5nV!I1QJyqGI#riFbSe0&O$7q}U7a2{gJB<_x zSs8O6TGct3_4%Sv$kd+Ha5|cifnOBjR~Z2GyR4oMvPn?~gF0U|SlxyTjLDTb_ta3^anxPgO|$j>Ma%pd zvon&kx^ISWNOtd0gixjbArnq6>)Kv=;wEfxJ-llvDB~0{pMaiHV_OTqJvVdH!-EL}YQxRIa7|l0=chEsYY#J)ggu@}Tgqv)1OuQkw=NPXbMaqv(Ceq} z@}j7-C{Ekt9}$f>LHvu#pl{hy=A)M*5z|bptc8u8w_{FX)UjZIOQ`ZXe#O1S=A0{U z1xS|?q2zIuYi%`;m zv;GMRGfv6b{jxu?AV`WK`&O$d61+xFeEpHiUO%nHF^+Hsd{m#byB=ktpzd^83#O?g zhe5OFfz8gmi<+}+PxK=?2&z19Qn;3w6Yesp-?r!ae3a6c=ey9v6J-$dd$uZa*%db! 
zVIn}9|KCuXbj<7cjTe7K1w@?q3pAqug>(jT3SZGt##{=#5vV^fFP*B(!QTf37w=56 zK+;5Bb^tR0%iq9LrjhxNpA!U9tNx?mV7N2;W$x-|vDS=wy$<9eYc1gc%b!d<*&OF* zFY4>3r&o=AXz_du2ER4%Xo2>>8Em&DTc%O-Fohrl;1iBu4NWu4s7PLsI<^C2L@1oq zfAl^`$t!a;{dLU!kk`1cA9sXGhnXCGo+t}QS0{NL_f<~M{(d|X*PN;OFc2!b!3qgq zJiwp(0pPQMkR8aMSTKg^YYb+rE$t_x`d3aq^zD=vYc2f}X~&>3rcpcRu2lNzNRuW+ zeeLsNZtOVB$CZ%m>EC)mceRXtUZ+paHZ>Hb5q7dmZWts#Q^>am*9|mzJ@Xquo>DcN z;G&3lfHCRX_U9GOKv;X$%S9=e{eWB7 z?A7Oixl@*h(VFG_wgh^2Co=?Xxa+l7oZhI_@bze8)!TGKhfu6%V)(^N*-gJk;v>xI zctSBkMTueP2yh0m9c@8RBZ2=uZXb~-UWFfZkMG8d_P$x4YIzgtH&58s11gjuvF;&>u&;HlT~AC4d2ldzd><<6ru)k+Aqca!!<<4xf(&CZE{hWIS? z(0EHdM!i?~ah?t@%Oo0>v4imZYIj8G!s@$&y~lO88&J6i@eiRsMpLwk2dETKk2ElJOG7Mavh&JWC+3s9%yR4!a4kuheB zPvoXqt2jTr3eDe`9I+J)LlbWGb=((X?$-nO{vUpK BuIc~) literal 0 HcmV?d00001 diff --git a/tests/data/humanart/real_human/acrobatics/000000000590.jpg b/tests/data/humanart/real_human/acrobatics/000000000590.jpg new file mode 100644 index 0000000000000000000000000000000000000000..15efbec533400c7a584993738b7d0b175b7970ae GIT binary patch literal 76762 zcmeFZdtA%^|3AEhkV>)^9flA(Asu9GZ~73z-lW!fl%zr>Y1P`$d8NJasi-uhLWfnP zVr^Qjkc^@utt~_cZPnJRw!L;eeSg>WyKcAZuj}^v-EPH~$K!r_ zrs`Dz0faO7?tky&&x`!;y#jc)>J>tF$%4}hj;O1xM=a1) zQ`c2fwII;oeJ=VRuZ{RWKhzeeFI==(W69EGn&1JoD-a9R)YTU(RA01c;X?4}1n}>O zg}RGYuG_w6vEH#$8tWrh?YMg5{u0x@)lJCbAH^GN0xn)#x=bHsuzJnLO=g?7Y~5*V zXYa7f(fMEd_PZQ7c*yO9y9dG3%RBIN(3#+~A)!&1qA$mgV&f8%l2cOC(rGtu{X07+ z_x7E<2SpEyOG+O-ep2)FS?%+>`i2+W<`!OS+v_)PJG(x8?(XUB>lccKzkUBP^7GfI zWNLawDw~Do=KqXK4Wa%&r|f?W?Ef(?T`;Z%3m2*{)c7+lwFNQYQrBI$Xx;Y3EB74J zI2Eb4e#g}%tM=ZwU){9S)aJMt8F2CAGW`uZg&QS*hW1}0`+qmEYyV3l`=0~*zsJ>& zSgx)HZl1a>0*8R%%@1gZ|Nr`5p9AxPZrjG)%`dQ}B4huvGZW~4bxE~c-P!fwU;BQ= zIkxYdM(_Fduiz0(TmivlAA)ije$NpfXY_^3@+HyyD`a(%rO!c$y)31)^lV;C=CfF` zjdh0M-nC!a?%fS_0V=ivLDZbvse72WwYrOwYInQ0loZB1qE3;6dT1M^63vO zry)jvkL(s3iV9L8mVF_ihg++`7rzIZX*-|ZA2TuPdGYhFlTjk3mI}cQeATuKp>00@ zTA7av;iqtFaaT`Eg0G%o)qJ|t&KXxB^hhp#&vW=K@XGFmST~<9KDVwtY~NPfQ$g>$ 
zaN?iUE~Y{UA6oDo(Nm+c9c4*yXH!5m)x(n*9Z*&8>&AT}tQr{54C?rv=69E~cld%X zH^_io=I}<}%FAxIW)g`}VM~MCd7`S@=Z-fAR3EgA+C)j-qt_ z1M_hPmxyVP`ej8AzV=eN>xOaK-h-~|iO^x{;~dXZg#+Aa61~YZF12w^5U($$yKn88QwH+`sYP;hnAT;A!5VPU7r~-&fWRIABd{^^B=agwF-$$Y)Mpxeb_0WzJyWzMH_Str;} z6S`bxCbfQe#psKpE!Z`eUkM``{6muCRfrlP`pd!| zD_)O&_f*WHZbr18glM&kXf4dg!bnoOh1Ke@4c)6Pjv9tP5sBY=9zKpA?ah^5rhh)! zY^0WT(O#Y5E6JnSFJEyY_?`+Or54tT&!nmli`FY?ww>K=vSe||l~IP13UR%HyiYv2 ze}mCC(k+pFD#lEBGj6#dD?Wz#^pY|4UlJ_6c&yOm^QFe9{h!Yz!*|Ml&VCG?cULBT z-PBb}I#734$LQMEZIrR|@&YS+jq4eOI>_YKjW4;Ps@^iPcQkNeNgpBvCI_8(U`PjZyi2v}V=6iF$hS~31Sq{F}9HdP@w-*+2+Jh&A z@(pJ>>YTGo=#avx#FBB9z3E^jRQIae{6Wh+E|qnvN2H(Fq(acIERncei&Kvgxn5h0 zS!ts@boa}O81zI?f<;^wG!29JjI{BuRo$fHY#Y9(y@N)=8-#W%C*WOV>bmy6WB_8(B_p9nTEf4m6$V09A~5})LLMh$k~tbmMkq% z`6ABKi{(t4*Q6qIp0L`hP|pTDD^wYVyg{uIAf?69a^_C39P107j&j6zXkLY=F`_%= zm$IyAj1gLAI$r1JGEh#f;de2J{8BgKVG0-bjL{C!_ z(}nFWAIV3Gp>VHnTGVEWk&QhKCtM||JHDN-p7|{yL~cOdBNl*)8p26_q0`c?G(GZh zhKN=;Nlfxdd6Zi0&C@2k&9J83ihZ^(NhArk;?0?%%^Nb0dZo1pCC?)NJt~aYRj9F( zdnyVyzYL6?TZvi(J-mq|J8@-ReTDa9NL$JR5;V zl%~6=XY@n?O0fmkUnJr6BwnJULj2c$kSw<$g}3Bk zz9}F19NyF*zN>sK+0mDOo$|rkzCRszla%_(V0>ElipWI24O2=JquseId^fuysbdGH zLP)7ie%H^j{~W>b;+?wmk#&OlV}qiIXR_dx>r zgR$EptYWTIA%5r~rRAHfmrY8258+o((Zq$=)nE1upL#?Z`MZ;ROmuxyn`c!I*5GHI zKyru_@6x9DWIdl3sY2|CfsH5}@ztI>a!e;VP}rxi9#$bz1X`@!VM6NMV#WTa5H;}L zFMkWTjjw2r(@9YwL{%z865(gmiJin0Hw6}I6j8Mmhs2Y~pbe@JLLHivuc0_14o*2= zJE=`_6PaDBg*a7Y`(cju3KVuLcrtxY(&^3M?^#&iCGoYoXir;~lHiOM7lfhDLd5g& zPju?2ul4!PJ+(*Nj<>-@&I}@#lgHYdm2gY)d2=B6Pdw^mM7alQw4OQ6>KY?@gJu8D zTDd5J0Ds4e3d*ki;&1NQQEY{?2B; zDI3M2GPtt&8E@^EWv+|ffWh42Ml>_`kT3+<-t+jBgPRoB1<$M^5SE{|Yi1U@>^g39 zWyfC_y-zoqU_-K-I9K#M`B4*?>m;nz%MWZz$lM3J_5<_M5oft6;Yc7mQRFLmzirR! 
zkx~qG54Flnv1;q^Fd=0pa}ni3^#Ftub(BATaIj3^L*?2QkM&nBEW0mA)9+;+1iAy_K|uVrXaXXD9_7Ikk4 zi0D1+VVvN$lXUnfO7k3F%c2TqiwClb*Q}~{OP{>@uF701>FxAkLO18B%@=bIE~i37 zuru8G@l>Se;r7wn{E9FmteNF}N!T6u^~Zfk`5QUuKit>DN~wa%SxYksXaAVEHd8P1<{HRnPF?)^GKr5w9eb zE-5X6)m|$-56^C)&A0POfk;}jt6@@2jv-Q7VD@=9v)RIGlg>)93gI5wVLyjT^VM{< zDROam`*Rrg>`?$!=%$%1+L-(<>KzkFdpl@(Dmdl2nI^#I=bmcP-d20MV(iQ{;ZN0` zzDLcptbh0j+a{cb!xm0s+=@LFN1lDIuruDPLj2vsvXy8YnNT4%E;9msX#asxjCX|7 zfwA#8;zabl5OiN$LR+(OymM!`j|=?elkN5RJxI@Du zT`EMdsR*spM>-^$ZgxziO-iGv_aUOl`ikP1FkEMpZQ7G&Vk{OkO+wBry)PD^{&`Z^9Me<1_jAEQ)=m0dm94dM~B`1n`hDFITs)tmF{s}YpPHBf@k7uXVc&)M7iDy}p_)p^_sBS{{@hw{{ z2uuHY{ja12r6Ip=IxmAwDC(qPXNgr^DcMw_pRc4#H2!*)3nN|9ETq#V2P@AyMGF$; zX{#k3I1v+yivzuI*$kcdf~^JjnQT<%NNwWpg_<3MLPUmd+*;!=w2)3&kJ@vC}=%_uYU9{&o@rzJyF=V5Ix|x9g$vnr1Dgh)OW`_ zC^%h0Dz`#u8TmWksY`AVNj!@!JXU)8N=1>o{I`fM@<-+W#IBWmI2{x@aAiAvI?sQ} z!WBz~7m8i(%fQ;gXQ>dU8U2&Oo{3=b>`@6Lapi3De7|=~;byT;+D5ea7DPCq{G?}5 z^akDEzqWolxiq~3EYQl<&WE<`-rfnRuUjM3-X|l-$vo}X# z@w(p~mdn?*37`7r+csNx_}n=~v@=Ae=apYDpFP6*daTt~*co zt4s6&{ahkzcn}uTKPw}qJ_FEWub?!UI*M0Db;-XdA7B?lyJi1C$3&A!ux-+H8)D53 z3+d(|!l#{nwnx$)ks&baDOmIIUZocs}ONvqxHqf9T!;yc{j?IJ1+4gFy+s;1XSH- zd7zN{r-xE`n$S+B{!hVbYH<6@Mt_L-(}OC`Cdq{=M8>7BMo~h(iB+Qrr%7Kt1$Gur zoJ<;*pPVKVA(YRv!Zy5kmgnd5${xL<_Wa%>_MQ@ZF2jhVBht0#M}!T~wYjSVRXpU}%$yUvp@HKLC`f6>FKWwHflD~5jP2}<&?F1F#c3Vmj4cA+*TsN_3cc*bOUz=2^#uDuk}WmV~ie zf)i)=gZ8%z+0dj#N20xzyPNpPjTzL@(v|L)Y%;a_c>xv4qypQzj&0Wyf;IEbOIb(F z4|~s7ZUKz7+{3(Uh&7wWvK{tiZjs@Ydqw44nmGYAl`YT%Y|V9))BE9M9Oc>Q$fdi4qdhw-aGoD4D{8gu-D|5jRqU%t z>G7wm`jU!_R{X22kkL5uPuM*x4dWx!ztMfOFJ_7FqE{XJi%Z*lZ^3`?T9P(owPT5f zMxFn#0UCax0IJ-zf5Xk*!4r6~knA-yf!zslhBVDfKkl772{m)sm!2LOO4mmkmcF=s zYly4;iS_#1mj-m%LaAY@fk|S?5(ksg9{M@A)%$%m%nY>bh~uX7Gky#y$W!tbkW5$dX&VP~JOZYaV5Jx58e?zed?0e$S2` z+5$S0RvQiIOgOD-j};a3XL+4~aDk^Xjfr)+-J}?!ufGpIClR zLQqs)GOpMww&sX+u2rJ_^_`Z>Hraj8T*L2YuRW4~+-(om?wIW5?x)dEnB2kF`3a;x zbeCNmasJZy&u)~cQx4uOExu{c2;xXgg|D!ND#ZPBfz4F$?GZSYzf$KIvEz$-@_Prg 
z>8n#?#NKZO$Ff;g8vz^hAkcW!Al4BkSg4Xiu0FQGfph4VHTdoGE|OZ`?vgY0&Mk)t zizUun5v5jnCwczIYOz^by>A_o(_M(daOKqTFNb~TpV>7Gxh;z$WMr~UA!74&VTKNh z8bTr-{Dk{f2;G7=LmX;nlzla>ZXjZ?Fzm8-Z6GE_}dF=-NbGI0%h#;kAX`(=8@wNnHlU zDK<7RR{6M?T`g>khjsnpVKc!uiL`8uOG}f`&qMHce7v7q6WfCIQL#~?(~N3%#FiF8 zx6l%&U8e9>=E6+qm2Sc9?KJs?^{+1$3}_HmFJFvOX9Rf?p*)CFFeEqOn}W2^U2w4s{dY6-JBKKIizC!1q0 zKW5HS5Zy}UMsqhuLgpY%YJZhSUx8L6x31y5R;uVjdlpjc&N+wVOlF^VfJ z`(gF2wS^H8)Dl4|S`VWx3FkV7Uxy8$r_FY2wU>{|1WIf~Q;Ac=6p=S+QTI7i`nUtM`bR-|J+R%ImD%a*NTt?))KnMcnKG zsFq3U5-loxTy}OI%Rr~JHd>pgi@)?E8=1*X;@q@aWqhlFT>%<7Jji*xS9||oyG-vJ zAy8)(cp!6X4C4nfghyXWd}MKw%QuFU>)ZS#ZK>V@-r&+stGg?mT>L)t2VCO%nA&@9 z=;zgBm5iR21Z4p*|N_P+_YA5OSxm#Mge?Pj((e)Mlm62wD!X%^$nM?o=AAw%2LL9ru@*rdbs&0i^MhS0a{0q0D+P}x)9c!z2ByHMymN{`0 z^>&IgaoEEgNt^EmpWce=nG=V*kf1=mBgnMPM>$f!fwueQUS*s55=qmo9`I)I^NM>J zd}J63UR5QJk(J6W0PM9xpupFIdR)ei@2Mo~Djz7$%XQ%oIW^?9H~oTzIG)d0X}F$3 z`?Iaic!hP#nr}M-{ec7>KN1Y< z;FZEi7vZj49q7*fQr#a8C_utFHNfqg9%t_41+Z(uq&ce)ricHlP?~Gz zQMvx+no?iF=Sa^J<2|x{;uz8F6|56U!x_pI^Sz$;kT+HiSmeYA)`#lb7XcDy*I0Bz z^K9JL`|CAd8eY5Le8^MO1+e(fXBrEk(W+Jg!g&60{FDOEkH0&1d zP_~8mck$!1FO)MOlBdFod@cM@sN>-gi}m3&j<%T@?3$XyV$3{c)F}4X;(}P9H%@VC z1j)ozEFDQcAOOjf%?ewIC49b>`U{*WPUL0Ur+e9kdMd1Cc+|Jc+>tpwPD{l(zIjYbxd$PW zIALkwvGM!uQc&seW1c>ugfq0w%)gcY{_kK?!+7dY6;X`;N_^#y>EVPU57<-JH0V)@V4FYJ5FTl4yD{_Yy%X*(waEYF{hx#D zKT6Ts5bb@B+sEjahDwx2=VV1ojw5iMTr9I&^S2h-eff*>j(7CHxd=~!7$|0obd)!v z(S4f3vSOj-rtZ;UVJU5#PJ<0XXM-KO*1mRJxNsP2D#j~b_zvlv-7{>fcqtlD>uNWr zmN&R>lwvMhE16(V{#9S@!SE(Z|18?urOyqci*O~1^}|o@WFwnbR}f_^eSPS#n*f=_ z?Nc5DCC$1dH%zah|J+>jD-(xqjE-nGaTUpX7~b@p`V-Y1lL&7ZwD|~jjk*ZmJmbK8 zinVHbpm2jiilOGFDPK_JmiVPf;M9aojo(KZ;tD|D&|1Jua@jKmcrKoY<;ZrBxSZXW z?x+y_N@`((1d5Tn4#j}dP^B=x_p5t8`p4sF6++X^6Y`@z==;Ovs9a~UK(s$m$bk;- zM2~B8BtFz|61}idJ`D-9hB;43`J3LA%I9(Yw|%WbfXWrbO9=Zj-bcjwWZtQW`l2aj ziA!p%j&6jpuc%J3@r)N!MpXN8rvx}@-f%m=n&>Vhcfr7g5V|30Ldp*Aw_PPD_(u(A z?oXxjqL`3_^!6yPO6vaVq-^+UcqFONjMc@dwLM{s6BW1OY1HuuzQQc9)G2HRFWV0# zR={uAR?iqi1R7eTGyYX#sofYQ9`WF)`-$0A`*EWD_j|;pd)==}dafQoUm%)HFyABu 
zyJR$*td~$zo+*#^w0Oj6q$>#bJxQf5L?|1&bwK=^MRvqE>oEJDGw!Q9J?(-BJV(K` zXw;`msh&57VeGlzGs8+onaQ?IGY*rPrzxg#!D^wC<8AMTSv65rgNKcA@_diX-(1-P zs`jY}&V(m%ei2VddB*6jpiP1bg#s8%#A)(ZpZ|N96hqrAp3PiU?dl*L=OdH3r0d+t zF$_NE4C$PQp(c$h!J?K+JpFe7WwqRtdxqgo@|7c#J>o%g_iNlOlBv)R)_O(mOsAlv)u)E)J>xpq{_}O(=98BRu zD^4yo2I?Jcit})?Z$S343%l`Pm=JB8klRdgysUp2bwT1%Osd)Qt%bCLDMO|8&edsf zQRii~-xA;WglD(cGgn{j*b>29BcxVAH<&W_=XyK4@K4yc3n3r$C>23sauu;TLy>np zf|J^gwF*WX3UvdhPq$xYr-Q73?@>cNs_fiRJ!Fb!xs_$WwtAXwsI}f~@v39Qil(!RPu4pdt(!S$wvO_pceM)fPiwvkvD8elq0kT> z9G)7Rqsj2lR_1uJ{dj|Xum6}o>RA{ynW{f&i#H%y3|MEEoONvZ80;^`|AnD1oxe(l z_F!Cu8S4_y?jxs)Gg|&8#lPa{MUPF+RM>Ob7Et)GyxkBtw=C2f*DQd%up8he6{2B) zM-*R&aDbm4%~n`vy_vS4XKV>_D+aoqYb$9ka5JM_)F;0Js==-`tvRQKha`BX(R z_KSrzXM8)H(8%UQ{qi@gFXUgxPW@D-!ZzMXZac}} zz|r#wIkvhhTq#im_BT^_&OQDy??&?jCm3(0ysldZ%%5o z!^tj%+x_1B_6pl@+X~CY4Z)0wBb%ughT)aluQ^7~GP|v{MhERP{3PG$*UPQ)-moHv zb)bJuvYVAT7CWgElTgVFdL`UQRTA^5Mr7+!|FA*R9D!fwGrZ{$M_FFobMJcl{?{XN zO~B8yNC#^WqnNO|S4qJ?5C!nvvlLv{5U{3_iafip5m%SM@*KN5&Tz#sVcmeXFd5jHC;#hw2?fu zjEu7nREU2`u7Xc5R-;Ipx+i()M+=whHO)ADDP?s^_|s5!BbqwqX#tQ!?{=g zVcMxaJq$0~C*RgC_n@?!*o%**pRA*M3 z-&BZpX4;p+JIrr-vNPh}^e2Plzq=I2io8b|K>+mvX=x5`+V{T-s5epHh)?iz{T)RzNgDjwBO{M)T;`F!(UhU9*GdO@LHypGZC;^oxH5?u1j(SpRG?*fn`lEhBry+Tz0|n|lc{SL+07l9K40A&CMldaf^h6a>U~i6`@kh+Uh^mGY>yK;6gn zQTsVRto_^Os3J~sER}}q?c=2OopHWFDyZHS=;^Q3@||uzY+)CYXTekkzm8}i$gT8t zw%fR>5aB@pN}}}A|KzU9?c9?+K>`Cg!pP_e|9M z&TrTyVt;+T;^el-AF;_?QGGiN*9W8}4|8DnRs@lVbJ@ZOK+2Q3Lf~@D0WciI{T}=? 
z`nt4n2G=tM6Yo8kZ@aHCz?g>PXOYv4WQyrh6@r1ku)Bx#%lmud%q~7>ju&$EUD5HK z_ibc{J%|>#CgoX5DGXrU^HuXlT2saLq93IkJp1f81qHQ=W2u2T>p;P^PMcqH6mx$l z{}#QrpC{WJVX3ApFq>4XyE+#y8Td5a8=MEbgx{+xM9YJrp%d+BZopC`UVFd>Is|dYQF@BeVUFsT&G^}yt2rV>{xC$ zZW{2cq+6hEUjkKPY6C<7n_IC3gB~}Z-vlfYwhGbYU@DpECO#9qst9j3St@}f_Nf99 z<*sOg2bEaF4m!2Rh(x9D((wNZfzzG zP?LzBW~V|(%*Bn2ESxA?1Kt>TTul8F0?QW*6sB<~MLPwFMT!L}{AR&9>Zep<2iD^B z$Cfi*STpqPt;i~zcXPmb;1|}O2mGimhU(5n!QcJO@6V-vOl&&Wp2#{}lwOfdEyr_N zWi%V?LLN0#7*X%5LTI(95Pc-yR13g}K#DUF+0@IBGJqfy>G?-8=LKA>%N|SNDLr}; zIl|NABb`<(qImd~iG_rpJP}@-p*BpMXw=|RwNw8XCfk8+&4uoYm^Z>(^ATViT5PPpX*`2bMDOw=WVJ8(b$1s*av zN^%*GT735&=}%OxiQ4z#2wfH}r7O%C;69!s-SSi+?#-s#POc;oS_ijFL3Su<8nlo3 z(#EjqN2lkk)~9SGolB(I5S)o+L9zbkDB9D3F;JUU#Pn$91u&wdf>rEn|1A-0yXOai zijwB~%E;Xr1+exVq6sxTxECZQM*qanG8gcaw|~L%L&T2%CJ$+ z?|RVUIDL3NRdkMZDN9H5t|$^lNlfrWWiGseym8q2k4CFW0|Cp6M%Ie`<6U5a=V`nk zlJ6faXuDSl0dAKvy*pX%;ZK@n8JZbBOnjAq8cMaE+Ew|sHU9Rd+E13e!QANJ1ex4$ z3=L(EnV59rm^}gsGpTL7gGoB!{lz!L1o??0aw!t15a+FGmDR?niVb->z?7n|jEfTC z+u9h{!_T6T!BBL9@)kCGsLwK7drkM?G`!bS2Kz-RR0w~r_=0|EO^Oe%K|AaYfeQxL z@JAA5%CmU^)!tM;dWRVIOfR#>$bIYJwgfb(D4BGD4P@rA{U~|-C&2X&>5h&=lhEb{4zICx&H`a1mn2R@ek&m5bE(1%n4e=}mt>0wOq(A$w z<)^>dm>dvN_=ua41tUh8nOEYXUSH5A{ft`%|E)rtVfHf*h^Z~a?tMD4|GbE>wjCix^$QC{2F$U zU3=>L*O0gtbEk*HnK!<@H8VrElp=1LOW&hghDT5NYuXm4-@kLRvKr@AD{+X&l?8&3 z!#Gs@JXg;DGNtV>q@z1U!vqW23CVniFmyWaR7J`Q-s#xF)urigr!h7l;zsj4w;zQ< zmowvmIV7^`3W>gSY+r)fL^80z7ySpW6~?`Dq`3o#%q-c1x^^}@c22Sbd1 ziz>~VBHrsWpB;K`Kg!fIGCde_USfBCfN;)kb-;E~zK>>d%P*?By@;HcGJ5*N(lcP?12pR%hWCvb8uj9fK3qG+AP zZn@U$>q|PILY!nkhp}%L=WNoy>oHZw$p&jwW<5j0i%Dnov#8gegqkX>9%L z9w$V`1GGx12>59X@i?3i|IqxHC!dNvzfRmg>_&hX_~jDl*@q*fLe7B zCmK}j1u~i$5I`iL?!RV;XTg_Zz*c!0lRh7)!N0ck4-90qGi!SD)nh9_@d#x=SjoD{ zCb*tCLfIq=W{9?Sx4FAZ@`P2-J&TjLLZ6p^;AeC=0sdSG2hUtEzd{muVwhS@1%PI) z7_nr6LYF*}EF`C5MVVd5bk{#ta!4Lf75^)#15${9oPZ{Nn<*=X#}X4NNyJz4*vFAW zf7-ZS?N}LeJl=53aJRMEXkKe^kqXiKMubO_%$gt-B7GLhlK$A<7ske3$0u8^Iq;by zqLz`kJzipRp$;7`>eo?Zt&pRaf|OE!qodrTzVN~>)3KRJINrlQd6*8OnXX|X%>?Ih 
z#+0x!5Wb>999Hh8O4)&y@3;4%Yb$O6@t~h5MS^^!h%<^(<|31Env%euxMuc-lM=5= zn+(!)8q};DL(;(D*6~h}isaUsa68CpH~2eei)uJBKs7YMDGwwNtEv6$GWfHU_+&me zk!@moqqa(%b>pS$fbxE=q?TeI}q*THm^7Nld^tjz7 z3qP{X2MrMyjQSgNV)j%*9zDWU^M#K^xHNww324TOUCLw;-k3xe-H=4tD@@mjNGvK> zpYZDH>Ru8exIH}Ef~_Y|?5qcZM{asbVr1~Ca$;Za7qUaov+(gR%SmB-z`G8I^}-E; zTLn{QU}2?7nXcwdV)pqv0Z;ucCDxi*{A$6pW-?DnFbCS%pK!dg2i*C;K-;STr#d{$ zm1RhUGjDb3iU_~(ai$K*-kfj#H)sJtD#RPNrUrfIY$2r##E%GEg(3V5G>AWDz#|ad zdQGXLRs_@-b%AiwvEESNM8G&m&UJjAQKWDX)0@$1exc}rp25vs(vEm8Y*F5HlFi!`zShuQ5d4%f^#HIjAfgg#-}h<3Rfr2Zihr_zV*` z!@qsS2^XiK3qN$AS= zIPhKv*GW+5rHb6a<0z}b0MNBH4jAzH46ht>^tfZvaL)AoeIZfzRsU@2rl40|Z#18H zFaCB_P2-CB+;C^DPlf^IJxKq&8TmC<5IO2Zd852R4TW&QM3d3D5~lEFM{u+MmggD! zBu>v6aoW`-w>R2o#qU|ZRN~(7Ufb?Dc5F)%^MgZh3;AN(_uJ>>HuJ4M`!Im41Jon8 zL#?`-+g;TWeUSR)vHXJDjGy>r^C7R_IIh1I+4%bVs$RmKATR%r!ao5Nxy>qx!#VzF zgDfbo_Kxrt?^IQ9!6wU7g@!ioa?G{TTa-R}DR9w_$m{je;jG5g)%=pU&I07GS8v*m z-WgLNx|utnSP7~Lr%$#lh6aR!v}Xc6DBLY0q40hwhOd{Xjp58q6rXRCAl~k3jqvFd zt8KWEd1-!lgEDe5PuY(&J-+J6-cnn~{>u3oJpcaGj?BmJADSKPEZf)j*c{g>5Mu*@ zz}9=}|HG8+!uYg-D^787Hy3Nny$@Nsu)r$r+sNwK_Yu1kKQ&4*1=D;)9L1$nT8VCj z9l_3_E)N~tCTUBXAufa5{(2Ta5!JHhyTbd-oswQ_uhRO04AK?T)d6b1-q;=*7>FMr zJm~T3xm!EE?e2ljq|)OLiVxjvjGL-)J<4HzWv2$Ozov=K|9)YC{+IeUh*cMGed-z? zv;I?x8iQK?7QN7kjEWpX~ZP>&rf&@d9wXVL80k#B|N_!FAYkvzo{@g{e0r zcbq$^qwS|T3GF{uY0=NYfyK^;wgi>ZSlb^DkWIU5iOa~FKYQ%f!ih9J$NGtMtl2@t zZPKkh&?#Yq!h(Ed;XF#=Zek(dSX}$qF6T03JJvzG4tJ-E6n{WMt^gqqP|lohT=s^D z-hi+sm5%GqJqU*$1CnM+pw$Cn>0x5mL5bq|M(2Jvh*@S!9Yj|3W{26HVb_8j@x5~! 
z8H&uWou4giK21e<`dbW_Q$o0G6V_9m!l7-w1V(VfZxo8Sb?A#-NXq;mA)_Orx;Y^X znJ21Zh>QObvsYt!Y>6j$0ph-3&45UZU($4V28arK$uC22I6BP4L{c!g%*Pmw@2aJv zpn`qHd#<3s@dPc>IV#WP>Xjv6=PpNVBgfE9bU#5cujXB@TpI8*b?K_O^wK|4aN<*g zqR~m&z&y=n*Ij$3EEF)>m_77%L3Ky0RYStA{2BED93HNv+Whc(_QZqto>cBm4X4d_ z{Z}@GGZHn=09APrDduXVLHQusTZFakh+t|lB7Y=Ei4$KwHcq|UszO{6Qc%tIQs1ZR zUQ^1Fn*m3$Lg`C9%Yl%dE+x*z)PaMk;bSmW$3NQxh-Wy)uO5XmZnk{&B#RDCs&rOS zYgsU31U*t%T3Sn5+2WC_U{M*;X1l^KAY~mm=Hbe3YPGgb7k7jR7xgq2j(^EAxny-+Wn-kGGshQYy zNKollnbFeZAZfylp$i*LPjpc>k#SIX5qTe#_5xcb(QL|D0fIsX-Q~8fN{cJ{mdgY! zh58a^Cf0~l3hNF=hF#XGQKB<|P2v*6&T>*zA9`=P{f+u1TFPQV>f$O;}QR)xeb!vUOu>px6S6*p+(v%f*-tUpD>dJvL<+YelE5DObEE#Mc2 zs?SkYs}N)K>4G4B;G;eE&ipwUIE@}R!z5h;hY1^T0AL-SEpvk|4w5K76wXMN;L(#J zm)x#-nDg%UW4Au=s|wK1!dvit{MNvhK~2U)o?m-8+g2RZXL?vdxm{{naE*KduO$LB z_U)mnt%n%D_(Qp^Ktv$Nix=M=<}dIsZe@_Jb#!V$A{`gax_yw^>lfOHkzeQ51k+}Cm?LTn zsXVs<;ndXXgEc4L9%6LXn`TyHaNapSS!EMbakI6P(gM@?1b0jC#(ckobNqJ>lZjP* zKa77EEWdl!s5_;=bynmbY?9yR#`h8ILr2KmiPdeU!MhxHterXSF z8l56oiLsTqHqE25SgXmlP8!5&RL=3`E|&QL?H!%cv&p=+8x=m!ULIb!a!JJEuN9HM zlmlj05N$mIg_QZT{-~|#q4KT%cY~q@Z2Eo0@a9jYTmN~C?{hD@)HMm6>wW}6-rkx2 z-{gcE65?CDV^U@kT>ZN*Ow3H^fZsxLgh*h&NVw7WD-W|jM}3rynPnN7wz61zUmjOCHDWfi)`|`?U#MuembrP=?%km z)osV7h8J-1IYvo1`TIU0Z|)QM+bQ4F0FpeC7`W#Ytiv+j&vzp*?SC~Sl&V3>#}v0L{pDg=^~^3g0jG@O3*k7}2S3(XdMGx>9GUg53>;mi z@d7wM%|X@$9H0Xy0qcFO6oS-r6e_@Ajy*HS_ z+_ni%gXuFyjHz#P{1ysM?(pK*yzE!uk9sGaHragn$oMR|(S8{{r~8+}QktO>@}51Vb4P*G*Lx!M;G-n}(zK zM;X~66HXRrrIZ^48sMx3E z*e~wxlG{yjTifUpl#AP3%p68y_IwyNoz>PUYIdOL@dDQ=bF=tm!bsc{R)vTLaHbJt zrU2u4#GGZyDqoIM`KZogBbO+5Vq3LI=lHr!UW9aoah~L;+0|{gSzR=J=e&R^qptLB zAXtO$T-0u&hoq4TtX8(`J^eI4c}UX1xvMO^U<1cr!i{w0&C<2QD5TmyhM;(!Vms$5 z{z&$!PzB)$BWi?boSe{lJdSGM{f2t3Ie`Zs*cwA$5VwJ^+HCm?6N$nV+fon{O8Imx2c)Rt2`!*Q^alIiak z@0)0N!UquoI9o?Ggt$@H6(AV|2pSj$eG_PypK3z^ zYyj5uxzEZm12jOUX+^$xRlc)ji*mpzEwY9n67)J6zRl7~X;}#fk}1Ww+7b)*by2!+ zFICLC69FZNp0%%pFH~uWx~;7P?KD-I_VsPNTy6RMT$er~MVMO#L^`tiyr;~}-S6)e{+nCd^bdppCM$MDO&tX`y*6@-UVF`o$q7%ow5w|VjgF@h}$YQ(~6G3lLUC+@gTI^rbrVJXZLnGD17rm 
z0qFBBFsV59V7_U_y2u$9oh&b18+YKz{N=m^yA5u$S)0q7U@=H=W9xbsLZP+{CvW7S z&XhUes`I4F9O!fHHU zU7h{A(-QpeTCeo+a;pU91HZKn$IFOu!stM_c0vFX33%}{%f5wQlyuW%hw=!w+4G(O z+UYTN_ss~;yojfVIg7RDY{g$$r1Ybk2ZVkFZ5f6d_qdVVrvB$ykaw|n6IiERgNBmT z`35p}UPRnGyl|GA)l8LNS8M9lbK)$@eXDtMQ!go~e|O`)t-MtzT}iDa8j+V~F~v0% zfAvTH+&*(z@ovQL$&PsxHYfKNYgqCtEZo}~W0ZRvMCi(m$?5B6f zSt}n8*sC8d{!dG~AgOZCu6MuN|784rgK6d3367d}xq9c|6#cdH#)}%n2}O4glg?rYX(u zzj?qhW4a47ER?a#W8V}+3zQ%y#W=T_uPrN@WUr|E?VvA7XZ3*~?K$0K*WFT5d~NP} zuUvPpyPig?ZIz2y%dL%~q)0KpK-JXaz)3n)X%Sa_3{Ah~;jM9BE%aN{n$J*G?h#3q z)C0m_g|Pdwk0|M$>QV#0uvIQbt2e6GfB9!xEMEt-ISFUNWrWL%MR|FW1d>3yROxZ= z`}6yKY(qt$_D{zs4k+8ccmqC4D9G(H$3NH;jdLr5rCJ^5mett$0BQ7;U*ghr%{dJT z-vQ7>D^-!yQ^z(EA7>J>_Dm)*S&rGkrEGnLO2Eiw8KFl+V_TKW;UF<_Kd5MN#;h}z zK}t$29y}bIu$_DPunzX?VM&cvA6Z079KXDJ_4lzR|4b&;EwrA@6yt_n36@skcr{_t zXoDVeVv&`=2-WwlQD$G4kEUAJY9d?Bc}Ci_8*qr*(Z>G|mxSSlt8@Cx2Q64l6#V0xHRbdn0QQ!?^bNlK?KWLkOB?Vy&a;8;3^#IU zFuEiPM01-4kT&L&hxdkz^KiJQTtaEc{RCZEU5jM~7*K?UUXt){ zdsRb|NUO$kjGBy_HV>UJ^z>A`85O8Pl6jrMj7F_aQ3swCf?^3v-xlQxj1p_Uo_`d% z83aXB1*JB0fUeR7{?!Zk%~}~}h&Pjcv(w_sb2nX-AsaUS%CZn0SR-gVuOG}k!q2(U z)20tRf(@LtSH<$c{1gaok13@#IF{X;vNRi0x`MRS#?iMnZ&@B1^kkKQ zm(^4nrQJNp%bFiJMC=uW7UCZYsbhxk1&vLkJd(s(+bED%4-cpV$K^evvtMnPw8s3k zb6@rEuXBHtBJ`-0)JWjC`&aj52TU^5F7H746gTxp0ak9^Kbn;i`B|$o%Dc>W@cQJ7 zQhhaBk-8?~L_3u;Mn>L&vC>U_gOErjPAPho+VM(aKXrN&rlr1|e5)4kS1vCM&FoW2 z&e6QSDDm(<^cz2rFeJUTKie7oNbT;Mpere_JTk`@SAxhO|5Sz!dWR7m_^6*4r18k> zA;QBocT7q?6olMV=f~jC0JIQ&@N&G8U5URWDi{gEEB@V$m&hJUe;F^SxXMKoc@A54 zh!HzDc8#OcXSr-<-{R>K=cGi*k0s@ozQ)hFrT^$0OP~pazh+PN(-`zGeq55td@w#7 zW3q{RvRKJYVieojXg1_KfrabpI%-AO#Wg#f-mtISV*1y$vFY9$0d^*8B+Z~a0~P38 zNHUsw<*?vMH#9!jW6RYFzWthp&@2)=T-V= z_8m&&P)j#*o@qHg?*~^$b`Q*eNUc%4Q$;*9K};>fmo#Da-SmYP$nFmqDVGJzTwGLi zv^>;;8O%0op%zT}%Vu|}`ovln_A(3Vw?o58s7W1riQ+IP7NcajRZ7VIxg1mMKgw0$ zObW5si-wEu`a6IRpXU3HWzMcD68F+mS?*y<_wKj1vpP`3^IgO~Yq<%w%wA=GPw?eA zo2xukX%ByQNq;D3!{4DtH31T6puf%UTn$QK_PZbX!CGD%>b>oXerLpfuU zSU&1DzBQ$Ml=4b(i3?OKLm$_uACre3&D6=lz3qD|8U#3=x1u#z=E 
zbXX@nq|P~cVw}1#7++N)A#a5!0Kaa;Fo+LDY!aMv&We^Qt{&!uM#&rXm^rv10R@&+ zx(e`D?=JK{HjDYsv_BFKL;YRlvgZRUtQ(3ys(u8~0OYxG5D@3Av`o%rt|l6TWF@jO zeq1W$E!b3bAE|{xe|A|S*;ej%ALZ0^&F5TXo$Dq|m*a-h*p|?dPWtRiy&>mJ3))n{ zvefy&wqg>1+R;u3iHo84c)0;({Cf)Hw!?L#Nps2~c!Cq4jYF{gR3%(IqTLxbgle47 zJg&F4a(}mBPG2N3fwTco&ZNb5CHDzKZV44s%sruwYYdtt4pb-t+1*BLIb}{`4E2i} z(cteH`23}+-|DxwLr{rNlhteJSWBX5P_s_$RNOB@ARTw_FCXreig68-Tr#}u@hb_Lz+`u&EO0TxOEp^DazgWT6xgddSG`nBN5#SB1m##t}6&5J&leb zhqTk1NtJ7(^_uxHc?VH{mk(#Cv+3FwvRNhCE81Jsf+8MaNG_%Sh=^8`53qJ$hhz*Q zwsf*u_FT`}1zo#Vl1pYN6KW$LCuFy3W6mJDy^#}%<6QMkFwLTAwON2U8l_V1p}e>B zRLh@~37DP06si54L}?C<2CyRU%2(ByD*GB}ErB9B4P1zOatG>;%ahk^Je}K;Z_+9eP32iYAzcMmNE}>}|JwoeG~=62FR8mx zPujN~Fk#8+)Knb^L>j5IGoehm6h|tXD6i1W4X+e}L;qVsTeO!4^z=B0g_Sx|<7>*M z8Y*D}GMnY8aLj++oflefgGjIV|0*f|d!wzJ%n~HM=o9kBSov?{Eo571)wGLbGx!XW zq%BDbI(n3onqisohDFl3R7pRxS`XmUdCbgvTk&PS=1V!)vV@u?*X{-`IWZ$;VS`u z)FO}q9it>H5FG1@v$P5vcerCDf_T8^L&xoCad^|K4}k4RbyvT7wz^1rXG5LrSz{hF zs5x+qtm}IE{iez6V)lf>TAn2qw04ME_}0p-Sk^BT%MflGP${`47ee@pDz3g>p5#S# zfsgx8C+W3%&{VuxvNKfei5>=wlW2+=JYtSkAF3*SF8zGWpd@pxA!AAMnf3tm>w}C> zuWJ&1+Y%`msqt6Z%Q&&Q-;1?G@#2z}SSHd`X(9{NMu{>EKPHf;mN#4-ZuvHyf&j=z z;_g8ADd&e&c$8$-s+5OOssV+V{F=2;c`25I?!q=WW7mmB23MnYGzmhlcg1=Q7RTbs} zJ_dW$cas6Pd86c2;AQ!G%f~Y~jy~9$yU9j<>0!V;o9l|Kq%&NT-qOXYYd?KH=)25qsJt`9?Y?Ah`dyiPy8VJq7JYOiq$#w$iS~Q8gQp!lUxk;dZ+&OiK9H6{0T<2&gwsdS}(I50% zT9=_zWzjCQ2P%U6pmxWJWQrcjtK-hWt;&o4gwJ*+epd7Cu0l)>5?B_aKCd<{k6{ z%;FdjrTDKh*DVcbPLk4LZ|vq*qaN43ou37+8R4;80ppKRu4es4zSp4A&8BhWHAh$b zWGJC_HRucjk{XOj>wN*&*~6_HdVv1a`6ak>kwTks4ceLXuW4I&s?FA1ef>@w>c|TH zD$af0yW#i@lXt6o%Hmg?IMY&6WW3lm#~#}=NYG9@W6p+KTS~@qzFk@T7KzW8*~FUh z&@xd;nj$0>KK!&Xcn1Md>FlQ({B4_wZ!ZR^pKU`3Y^9cUQ3_?!@?;brN}k zP)mgfk$s>l-Gq!Sc#i^AiWk__G5E{1nbNBKM-0d2t|;QjES(PmmSSSdt8jgTzn^^J z4pLj0yr+<1M8or_{Wjq8Qh`(y5HELBQ=gKs3Br5V-yPnn&Oh&*x~AO6ZhLw_RCK5f z-cTz&ZN3V`SU&d9oIt^+{Q=@T#_6ieA-C2ESz5;#M{g;gZ)>+eB8sekV*>B-D*ZB` z`UM05LzzFIYJeM*6HlLqNY_HU8#6d2FpYjOXiQZk!+YRa;rAW&H;;{fd9NX0vo(a+ 
z@BC#hO5)E;%n0WEhf4}26_cfikD;8v~G(%`g&6~EVG6x7ER+|52txvZV< z$A?W&Z|4f2>8yQQ$i!$B5lu(xy2pkZv6Ne?SpqPV-xL&n=PI3Lw0QCk`SgH@$6y9L z&g2ixe)$ozkv5_`#qmyJ2a@d9wPN>9!KMv|3BM{#QggpoI?7(~bUUzp&sIJO{h>M} z(gP&nknO;7`3Baa?IJHFRG5i`8TZYnr_jbkkR5oukYA!mt9PN#?=@C45{vc1UEUp! zNS=d8SG+4B>2M6NZFhh~iov$YdnYF?a-gK?D^JeQ!=E7m(matVU1KF$ z4U#tP6UaaCu``erI`F`flA8eFsx=h+P<+_8gn4CYk{R=i3uiC6s83#ONvEN^DXs|AYx5ED9$l~0uHDvcAi zS9zTtf|EX$R8cg$-km9MOJv;@-hX4#jz||z>s!NWsaF0mYp2b`)n~DzzndGTT+Cv4 zTHcM4tGAv#75;oz{rlq4KTf;4uG{h@uq=-?MA4Taqek{g&nNPQCNM^jp$rU^n)Mo|jIy6@ATW z-Ns^8i>`>zljfb4;AG!X{1-J>2e*d&)8%2Zy-;@Vs`sp^ zG3~hEcJhmA0<#fP*0W}yJ2X3(l!5B5MVQWg=76y5RL@|=P{u~x&C5+kH(WVlc!C#1 zn^E;9v;EV>U3wDgy)be7?ZAcN0f$Wmn``OAkob~BAJa?Pqo!Or%zbc*dU=vFx1pc; zP}$~qd>cbAt7T2zaANJIiNhIpLXf*}Z^&{kQyXE#$u!FNu;oIfxFHn3j)&8kfHCYu zy020w+UwUn&@#oI!=W;I_0LtK8}AQriI^Q{#=3!rF+gLI;+#PI!ZW;THJ5k3_;5R*T5IGAer*W>tC)&%A!)a9Tg<6-&uIp~_xX=}qL)o! zISo@G2+#wq&~8$cakPQs_0+lQ_1h%uF2W?{fW2T>A*(7k%jZCEOw}HzweY7Y|CxqG z{xj`iWH^?fqoAf$okz{jWWV~;`ar+<_vnY`d%1L17EXndXdp_S71E74KLmw^av1Fq z!2icRRX3h?bZy7OQ3aAl#y4eIB2+*($F)V0s;C8jh8cW`k=Oe?>lyf{c2?PIOR+%B z+JTmgMal(kP#Ra<`*t&(|7YJlf8kEXcsqC%r@lxL@!Ac!fY`*1t2`{_nKMIoD$iZo z7-}0O+@iW$aq~(}@X)uuiT2|?iNBKrcFV6inD&Oz>s@mmc{hUf6;nt0cXeRn8z;nr zrkc=C_}k7U94W8v^R{op-KABm-`)-t7)sd>)={rbkA?KNr;A@Pt&2O;T9-g4W(J;e#qi8_*QXhH9X?BCZ%$X1*&`y*>+ zdyud7C#TZtFR#~9_gBjbUH_R@E-cF{eb4H=S=JDhK{@a1<&)-6=Ohb0@ZiQbuZN0T z^gP*hq4cPv`Bc7FL{goGO*6Mi)9lMDL{0x<9Z<~5R*2<+aL3O^ed)6sWQiMqG7b1+ zpA$KZ$F!1~3@R06-8_)kpl*D#MgDUmBTflXq0Y;%rb>B&DzSkS-D9lBR@mZuo5 zj^rfZ^LyEwTd<|wym;z7R#^T3#p*p~7K-pfDoUTQi9vKls(l^xGW%yGiSqNZfzF?o zanT3l*={L_)ElEHOb4V}R^uy56$zIpW+7ln3s9N}sq@)Jtlg-F7nHG3QP3PgO~}Me zi88l%oY+e>z+B0z5m~miY@&{!!_m1&gNx`2)1k+5lc`307$wU0xwu2O(KQE+J&fLo z#AkD4U%9b^p{b0czh%_le^ZrdC`Fw72(YpgH&3@WR5jKS$CJ zE(FNP=0jZolb44CEE7oGP12)jOKWrOjJpoi`kl%#VnG}fyxGV)?n~z0dvXVlZ+EG} z=Z{0ZE#hhw!rB04W$Y^O#g`dgdRcx)qnkD-xlX)6c_0lvZ= zIxb>$cr|qa`8Xo|Au9`YuA}G{w_u068ysgG_^|P_E^g4Z7!4E|Q#&jTmF&2T^x^nM 
zr;kw6`Oa4MT=?6eW@8fz5Ng=~!(u?KDs+6Crj)71f-_d~m5+LxHgJ-*2mlh1nkjnX zi-`sU_|JgHP=$0uL~2Y_Buq6M+sJd|HPTA(!YQ83QMQfh5|g1T&oKU2Nnv+9=s)B! z(Hil_%Bu;op2l7Dd;+)VSKYsh)Y@MlI1Ihr%**NalKKlLea&L~rJ-NOrX0FHG>t^N zj|%waFo>?d=V)_4iIzq_Ez|$GS{Y%vrrx9R894!5IOka0DpOdyWfCZY5_=x#Zp&pKEFsnojR5syRKdpZWI9D2&OcU(n; zV{&j4#H330eMA(|7SvX ziQqEOzsP(a=JvsgtOQ2t1AlFDJ`$qBqS^c#lRHmv54;RNxk7k!gP>K}1(?%RHx&s~ zG7uzhrgSs-H%{wU>D{8&f%?b1xKH1`1`JNOrxl9uao(W$^60qQFAlkysfwHij=xia zj#i(hDYoQao5)nBV$xsk-bN-+I1hR^RcsB0^BQT}w1vuGG|Zqhi&>`>vp7n7DPNHF z$y_|3g7 z##<=Q1GQWR^6>NFgJXq7(BO!jBu?YQU6V9%Q|h{d(ROa1rlx-2F-X77R5r*|+Jy61 zGnq>kcC(9aA8q9g!p$}Yo^?8~S0oI~rBg;W+LYrD8p;qr8%yGV9m^V(2R;Z@m!-1I zx^sT4v@hk`y&oQY2!An6kBIMdGZ^@UZ{AEDs0ydIa4y9LlV}ZGfr76UUWOxghmtU zyNoOSp7i}Qt-03rc<=NY^nZRmtOKBw_~_Rl=Aojzp2c#F*M@ec<5Pc zuR8Uc-Bs)|wYs_%yMxpm5OiNvVp~sytCF-gD>p_^g{t!#hc6?#XC96Sq|--quoU*f zQGo=dnset(=S;u%i4ZTh`~feUwe^IRj_JVN9xkKrmw;8Lck1Y*QdhHC5kpCn`OBp# zWl7H_F1%iOBHv`8ptIl>Oto1b>HB$Nj1n8q%-SIJsqr6aFU?ZO3a$vIW*|vwWwZn1 zEfU|r$;(bZ*6EaWQ`KEP?JpYjnfdZPcQU9i$9Vlks_$Ttwn4YbUaPZ2gtpWBh*B}< zd%FBU3xDxv;xibOaAJ_kk3X#Q>Yr)56(m@|Upt!y>rnDFMly9C@+jm6R>Sf_*DCG1 zNmuOn7OWEreSu4UfXf4vzqpm6V97WkZ!>|YU0s~XK8x{bYqKIrr1uq4e_RsLjHE)yqM}4i)N^4wfXDw?ny(AKEFX{ zT&ecE4v>8>NwaS?ar@Qr!h5NYL9yyic>f>t>eQY30 z<7qaqt`|l_NRF!yL9$+(+pGUZCi3hp`p$c2O-7b2+T14|n>fy!<+CyzV-1<9N*9S^VpX8 zR=S=yh_aGL@}co6{d7dA=JveT%nPzy@R1rqNT3N!Dj83i)>0?*XE`#5Sv$H{Ip@9q zvkxri2Nvn$ZxfzZ%D~{d@kQ+qWQeJoz2+D8AvJ)4IgfkUi_lDPq53XqIXRZx z$k`Go?3%$;y%PI0ZG=){4rZUoUMA$B=jt*k^I?V(792KJ1d`0WaYG)Om<^vQku66x*v0He;LfQAQox93Y^DGY#rtbIfM%)MxEl z<0Oidh~6xh?{3Bj52Djm?W6S-;(9{+#qr~-cixcN?x1WMz!xhd!q2+W$(K<4xYtnU z;`5md^PcJ}GB*>_W?oOHxyb!(cqPAd9k*reZ8z2T;B#>=nBFU-r&@gu<4+W=jJzYz zkCe=~JG9NRE(YU0wE19fR5XzNLHF4u)o5RvIgVoIUGYejjs z^kzU^*CEj{W?YiGsIFVND8>DzO+Rm5m8P3f?`R@#&d63Y$dER-ArPeJbR~Zdnb_7u zAz~39<|v7mD|c(-?oWp~9bNUEt0-oTm)3GDQn(2-{R(sBjJOK7Cc;$!e8P zX$|rulpF1soe~iXj6*sXqX+N91+D;L*&RB8L3;x3C)?wRI8Dr=(m& zY6yL)lz=ksqv2A4YC<&)JR--L9kqODel7{{`EYThvbUjkGzYLibXfBrLQJPgGv=%i 
zsLm+3ac-9*8t1_%rZ(<;$|?jxrKV^rU0oT0j_x>(eEC)3<}R+|pl+LakGtx8=QWdO zJoZM-ev^o;yum6lexCMdduU6W3jAT4`{BUT(x8ig&Ybfx4Q8hC5mfYFYo zfr*6dX+7XJ2bqjhe^5U*8pdC#s1iXYH(`>p)mn_+yZigP%CqaqZ`SYi+zxs3^!D5O zz;I0Ik=jAu2m~;=NT;m4Noom{7htw@?iCAdS9R=ZvOO`B2imNAoYvLH2cdp3!<~U5)!n@ZVP^$6Bl{R0&jG9v^9C{1Rhas`F#oP-~grN>y z-Zka`FG~&G%FGt!a3btz3)W`M#wUu;5}|fGn0bQY8%gyBOipbP^LX#!wZ|!@%n2cT zQRUV&+=e)pDa;U0)@mOPm*{plx(n6hiJKLV^ZZr1W(|~$SL`@lxtFCHB z9e**(GJ*K0Wr`TnX_BJuiU)4VA@vIjijlUwxZRq!eU%<*_wu5zmo$ELdq_`UvggQ# z324Wqg4SGtt&CzVJnFMQUbswgYe(YC@STcA-BJ^c`}%{1>QWQ!wbgkIy5scRBaOY^ z;ZExxt^1$pJwI8;$y0agu(^_SMdKC0bli~rj%`?>u&$>kq!Zv_Ki#(6YQg@nneKY` zhI@8zdZmGrjG#Zj-us_JpV8Cb-u3|zM2u~;^&J+!3gftbMjETu$n`J>YwK((Z^oE5 zG(B&zLsCRg%fp=gxp{Q`_H+AA;L#Hnt4e|nWO-hp?JQY-H#Bc?RNt}mNLzP0MetHE z1Yg(8{pfWCLqnI*2Gb&PFdBL;5a&iKG7p@$V_Tz0{*iab<{QHeSi?alR*}zcCsD{p zUI}gp^jRJ!dXT^YP%s8G+am8sxs2Q}?Hx%OJ?#1zZ)`JJzASLj7yrBRyXLsL{^Srn zhO6+Pqs!l3Q*IaHE42?PvzgInxXJB5R#tSSRBrsM?K-ip%9ho3a^tQ!zv`r%cyLLs z5w3PHIh(NtN90|z3%;k?Y?TIp)rpiS3W&-+c(~GXc))_47cicOwC~qUOh?NuXM5}Z z5q6m7dnL;6)bq^U2mfw!-T+}P?~jzh{GrzqRO8=c8;huczz+KhL=}_u$ST!7non7?8|L!Qv=4Y2{2|EMrY+>rq-gmL+CT7>HFCzZtV_IOpJnuEhx#H{E|^amu3Wj{2vKjK&eG51*XQ>5@Klqi;N(vo3uQwT7s>}8( zoTGh|`Eg9)^_oJ}buo#{8G6z}OZNQG(WM^kr@+=dI`BVt?*ET}Q?F1_8^$2<3HGBB zwCrej*5NkRI_0tQ2yh-Z! 
zym=y!y^&yq&N%3x$Vy-xr3`ae8{q93SECmU=_=ec(e z37X-@A)=O%bBxLIq$R#t_quVkM`rh2=3;C1Yr3@LvwN-aIW^@{#m&s%70s2lIcT!8 z&Neh!KD_J;djAI#DMW^R`HylK+^8g?9-HwabC9Vosns=n#IpY|_KoZLOzAvi5+K(d zI(@Ea3th#C#q8yuFw=LlkT1XhU+NOk3H+s&U85^3zwu9e5Ck=F5*Ck3gs_EYRO3q& zme~RRxy&daeUjX9rz%AH^E;4JK{uL=sI1YBYBpIxq>e5fC+4UkTHKd6x%@m9}0^yUvBW+ndu_GNIB(kHNupo%2{4Ho|y3+F; z;BykGd(ZH=$kBV@LzZil*UG7Vbiemv{2`ZT&3nZ1b^dMtO%t6%X$5)PAxR4oY#|Ri zI_RHiJJ`wmiT9EVig%z6wWOsVb>L~{f<}x|3q@fIpld*xLtW(7%FqwV_|K+NK(gfM zyt7uAHvEl;(lg$g+HSFS3?AE26DheKd?-((di1D6+0-;L*2D2F8Q(WXRPMo9;aDN+ zPnFXTdlTGD6Q;CSbVbi#>!BNKm`Uz2lR7Y)klW5*)oiz3#e1fS!4condeJT#(7cxk zp8c4XTFH3S+wSqTo)ak7;a9d~1mQKAQ(})ehPHBpIRc`&q#WsR{Kyz{eR^>d?{z)c)1VId1AN~p$UiV{Fvm|be8>1Fq=H(tA{~YDg3}J^< zDH#ou77c?qMCnVL?YDLe&9w0{-&1o$xd>+B6>ilYLcJXq2R|;3JUH&0C-WrExOIv` zVXA#QQ$3Pijk;k!gL6iG<6mXwH;%zaNdI|4EJm(io-F@Rjf>Bf_8!#@Zv63$Z=zgK zL^`a#09VW|nA@O0VD6V?v&KZT7O8iYDA+u%C zQ4p=7i#b!`n=-2OLltErx;~2lE}YJFE-N@1*lVn zp{LZ(4t)@!zO>jg>uly=`lA=78(80VBeLks1=fn(&a;U8YyNRj?q)bK-v9k3`fPY? zYE=$;h7a?x{NR1nx4@E9{29{zh=-L!F>n$I8CcBFRl$fENX=HbSvT|T zQ37tzxS%04LPj7Q5~~gex)GH&&1X2DYJVQt!aUhMxu!=*92vN3GoHFgew=bXDd%QO z(;@*0wf^>LU(K!DI|MhP+XU*YMQGH0L4PG_My3A~Kq1J_;d7fcvzVdX6JjouY6{}z zNHEhcQD)<1Z)_)?9`2 zke-59-!+FE{xF!c-wXeG^H&hQ@x$+uzsgIJ!3tvN78ZEw zm+~KeJ9p&ETzntjj%ntly-Be})`r9+vY{Mmsu!M0@x_vXp+h76qh6ATgh~eK zfB?QDP6{p`=j}p`x*Y=#qe2IHml=RQErF0mY{Nzq9sOM-C1w$e&pwylqlyj@Zs+&0EY9sr zaFT0(`WSP1YwX3%eVU)80)t@oiVpyGEf4yho1Xw>n|RRa`{u8%r?u0gKG8|_f}E6?7@L}O{q8@pmE?scioC9>?J;e;Sv^e(NE7G3;Z z$ALmHzu@}G(3dk^$tEtzA9)>MzR$eOB8fgkG8zXKu<(#J__3hvRCx~FFxs%Fy=RQf z(#sQ~<_AVBt_-=^^`DootbJ@_?KP#zc>>hoeq#H8kHn^AE`hUL2zGSe8?b8Ek0!t(3;@7BK{(sbx|F7RcanWWX{@k`k zRk7kD-x3{Gs6j;9t=L7D|s^?jZr{+BGxn%Rd+QIH_%22Gi)2NB@@0Y6rc0* zn)BW(m{h{Zk4Vtw_S19f^>?i@vCfxXzZi!8ekR{t#5Ze#@z-xaRrBzOe2--8p3AO} zRt|rs%B*xcyQ9~p3=I;9wi%~x?!WgwdrPa=yVt8biKbJ8Q(aa`AxSSVQD}?j4(7E? 
zP_8eNy&eE~!C&bAWh6=ERmodeFnQC^C{Cd9g~W5wW%`hVbE0tTnX^O4=pocmxlQM} z=+mas&YzS&<}eY_rb~to`y&MjVbUty!^iIu7jks?uiPRqWS#e&a1?RU&7%&vo@N?%-J4`j5&@PrY)mgj+mYI@x9FXO z1pBU&Ew*9YvHdFSpJ{4=xpoUpB9U~mIV#q_-R@mBJdZL7Et4+$CLW@hSZi%?GVmT5 z)LQ;i%_;nixNk_3)tA8^Z%fEtA)QbH#jlVJTfqQ<0!Z8GyP@y z0NAOtoI3IPEF_MOi2T;lC9l=q zs%}u?NRcGBX>n&~b;Vq#h35Bt{+g<}7Nqf&)u~+ZryX-GTN% zsa0tP2DTE>|BUZ*@frpn5a5kJA&F~&r(C#v^ANpB|Aw3k$aiU4b+mgOu2Tk!A+vUZ z-fPlns?1N&rsXmjUeE&k8;nDhI1VU@GHMLInn|pyc+63B{rX{p;8`nJcZ?N5lojvC zx-_O_`)W{6jaCPiNyc5*Nh?<)Qig5L_+Z&as4PGUs?C|pHmSv&Qc{qDa37M&6S@MJ zBv1|$A@)z2DTURziGorXq2eq&bU4TwCo`ZTEgP%y7k<)`Pjusg@uiY3s$y?fYEC!L zv^R420(TT zoJJI{1cw)P={Z$NQFOWi)8D1IL--+3fLY&2(^#N6wQ8Oy=diyN=SVf)-Z{esboxP1 zNW*kcr2nY=elOI4ExflQ5Z|Ml`Y2I|fZI>oq+2$wJxl%NbwN4H*L(a>SN&8%V<>#= zf2Ix7ui<}e*_p-24?c93cl_L;5YNbHZJ9Z)nfRpNW41H;MVNQQC~vSsZqsOtEr%tl zYA$On>6*J9HSsMW*5opSB9dtXW$14VcD5q*NzVl)I)Ez1wTveuA&xYJm;Z0 zQo+z}X%JhYQzL&0>W>}QmEV|dGW{ZHd8NHAhp6Q~Jxq zOPfA%zME@ktWq3{*($ASI;|DNl;S&%NUiUNcM&j}HfvYq{^i`Kz3M^3ph*7pQ?urH zUp|)JT*#^V7{=wwd^mb_=k5IdJYsZ%wchL4n3}(mqzXQKHjSNDw!KYS`7Mjk^W68G zYg*||9mBH~FkzBxx6-zR>GPkV{QSHGc{IQAwyF5$T)G2o^Ejr%kR;%@q00nMu6-%LO+{ zJ^n-fxmvzeS%xK*I11gEl|b>`hV**fYN1hV{coy8ub_K@o=eh;FuIkk=A(ACaz#Wm zp8#`$T1>EtfoR+JIo>_k`ZgpP!IC1NozL2ry2R0jKt(};`?eW$KM%Y-m& zh}_c3%Og*hCr9EwA>VMwF9Z^D8oJmSCIz;-W{PGyLf|-FZQ>5 zTUb$AaUs^?L-ElE^VZ*3GH=0dQ{nH~GbZ;ct>aGca^HvqYH*5da3bB{_udqU0a+%_e5Nn9#_OPsri9GTF1MZBZW+F zD#Y<O~=8$&_0dJ#s*&9db!rPdp(Z9*^)5Jg12q!;9 zJLS852^Q=f**bx3QA@tHMs^OtN!S=W)+>Ev64%oy@RuV=>3Q&tRGfi5I6wn*M*j@Y zJcRF9p`67HlQU+a_CpZm(^5<~A`A*ldm+BwR!;|q6wf^Z=^8MdE~Tu38%!)6PNDW6 zRVEk>au-K6~B?U5#qp~r}sORgL zwsckga-=--7j;SiBS#v%9j|d~xGaFU;9?q7;jZYYh&T({B!)#W+Z6SeO%?cxOq$ZQ zGGl>KN!g_I0cGJ5*0y&7sj;#w4%;oalJOSorAi%?j9AvWV&-+>%#-jQR>Wyb&>5O- z{lwD7TERlr@8t@ICW;j^S+=Q2N-$y`6N7K#Qjv(;0Z!#(5^RSySrDklsH^ZT%t5K5bl(`6Ubx0ZW|`dGWggm2$CgJ@6KDVX)^}#g89Gy~ z;}il8!@N;i#+q?#V3;18E{Oy3Uz9PsnKfN$tY^Ki5*29n*Z4D88|~wUF-OZ6%85FG z9zHK<0BG*o&N-G=Fh+dEJ>6{zL@2-DhAm@TH~!E_e7E5BGxLVH;NMqhLlRV$Q*@pm 
zICVM5cqwE5YIQ%w*6?F#+zl_!ZOS5rL6gAi*2!GFGXG& zYC`c^+%kMq@ zS(^+1>1?s;Cg(25o$aANt@UVx#he@`e@z=mt0T1C?5f{%wxR|ykxGUl8?_YSAwGN& zKQDg#5bp4$!wD4tb0Ug_QP^||*pg@#99G6%8z;9b-D^6RpkZ>|JW5wB{?mu=x#MsA z7fVz<*d+j02YAfubs6_!w2sI!t%^>43M#biI)}T!N<5y`p&jn$M@62cMx_*>{gn1a zxg^O7qzZ_IaXR)h%7T9C95$)XeP5`RF5nO-XAi?#1QQR| zT)AZVlcI&U;xi-sIP0%92}`$K<379NCeu|t(cZu}k!W z+P8Jn7-!lt^Kt2-q*FSeeM5#`wtgUW9I$d_j#Y^e(vYKHw2qR z5VUBZ2I#SDvZn{5@(vkeaUv~S+Ir75&JEEJ>OOAPO3=GAc1@t2^K~QNPy$bfvk-cw zUwZsPL~I^&_6agcW|JH`E9(VJp?@+z;F4)z_^D^^ZI$Kkm1Ct<6GiJnJGJ!T^DZ0v zZr5X7nX*b1+^W10xI@#qq@tYrrNqhCr!x;EdOeB9>B|m%xTyTT?{bKgwy7Y=z?Z34?`AF|)*tNWYYm&M-#A^V< z^T1|?i=%y0D$mi1$okEET}fWgSgK7iu;G0@K>>t2WF!C)3jduAn=K7R9vj#4ROpd^ zNuah~2g|`)@SVWF##q@R>k~&-!xW)dGVD?5+yWP#?_tjeF!gjpS;$h{VBr}dGz4hl zO_ibnN(OEw=QUn;=pcW zHv4cH5t&!Wos+AbB0@!i(hfgg{ZN|D8e2-u4bL!2J1_S1exHz=HwIf9*S`*!<@Gg; zaVn_fi@*%EL$&A*_drH_SB)eFs2Pih+L));grt$U$c{J-suiQM8`BX}O34Pw9CCt& zn^>xU&f3}4oEN^>>fYWafqL1+Ib}g%5$SF^OEno69Vb=fa#A9h zN15Gx==kxjo5#4lMvE0*9xRnrrOLi!ZhsN?PmiJ#&XKym1WKECTqSkaz7lZ@KkI`dumi+wBGNKuZ?nT>(d9nekN{rtmQvn+V`Yz zjpnQD{{}<otmizJOQ@Jv}G4GLPW!Sr!+KsqP4K*jgkJ_MS}US>rQw@ zU8p%j*!NQHJe&K6U?J^iY9eaPK(Ji_Dp+;2?iF?{J@JXGn+J+PSO3v^ys%~PS9Sx* zulBj;D_dD<%f>qTP>B~+(=o`9^e9%l5E`QBsw1``Ee7LC0(O*#6N;@GpU&x;t@h_K{!oNfI&$akM;}==U zk|U9J_x_Hjn#Vu?g$x>umdLpcog-mXEv? 
zEXFJwJv~@@WCNOn$C!a7-VZj4UXqcG?Qq7f-eWF0K{2X!1`Q2-*ABSe*!A714}Y-j z*(P(ZM>hSUBZivt2S{Ll!GFe@1rnV)Li>m!UoN~@sNPp}qSAl4j8Y2;6=xOZC8v45 z!fQXM9j23joI0HbkH@j+Ew8G+>qO_|(q8b+p?YYDjW(Tv?USeqQIEE9BvDDMcj3+t zu`q(Ai6h(eQ_S(Xcg%GG0*4dEs%NZ`xyO@SpoV&Tk`A}_+&;B`_k75%u6|0^poyjP z-TZDHwhG9o{Cf<5>~PA+)?U`ovx8s-h9`u~AL;ZdB=oWMBdUz~bAvvfaW6dl?AkJA zMG$P>j~-T5kbO7;6tHeB5MfLCNwj>fa#mIFC(U}zC5|KQL4=s8i>Vrr0e@=_yqm=T zI4FCo?U_S#v>^p@BDO6g!KMA!CUbE+IJj>)8=b+;q4 zv3hDfNd@CQvY5h z!|y+d&1-8@Ijhi5RVd_PZ(gB|+*2?P&rD<3VYZUA7|~l!p_s6gzG}L8rv7PDm+kTp zvc>3%jZueYF4}CV!wG4XsgfSWH*s=O>%=uo#bc}aC9ag;$GH;`%mxe7bkG1|)`C@p zFAPxsD=@mv(Z`9k*T3pWWka_!Kp1Cckk088%~*~{2}IK7i@O%go-2Jt^xxp-6wlCA z@7Hj+C!LlxWLCyQ-4uUNWboMU^3Vi#!Qc0!+WVO^!*!BKo_w@Qr{Ah~7*#Z4?S(FV zA*`n>Yvw`8!HgS$nGK+^ak1@;nnd?KO49kH%}W+M@`^EgS61IG-Bf42hJqbP@|^OT z8sgUbPMW*@B}#cMtZcV-voxftSJu5?wUh+<8q{iCITv@wBF^g1*f_#~@7>81(#9Nz zj9eP)KP|n~si3I5fpcXs^s>#C#P8$7e}qi=wK7(AV(G+TraqR<%bX~D&+jbHCr-^} zAab9`f3!9im=YJAP!{&JzfDZBcdtPCX&If|9QDu7AP|O(6-*8g>YwjO+d^9<30aYc ze{gdQ4yJFiGz9%dl3ln{)By&S+j}SUb{K1RTj7nOwXe(EmOpsk$d%LcTc?XRrY1#r>g_Rg*yNzlv{ozh1r=F z5GrXD?-F=IxpJ+TY5oBKGH|@9cpudwyt{_pCwlAo5?>(aSa``Zn0C||X3;3`aMvP#gm0)GJ=skxCoF7-iCxHlx%FQT zxKuY$X)LMAo zrSa5a=^6LIyo@pHmB$_pOq}vqHec|tfwn_(+~Uz{Q7K28e|SPEY>uajn{`-%EOK^BIZEg*tx3Gd4H!x4pz?_AIIw&!u)B45{!2JmF8d@( zP*WbSo_T_zf7ZIM!E?_qR%ah>D4zdNUym`Y-Uv%ud;UxxqR<> z>oxqAPL|AP+T?=}p11PRoNBk#-)1_vgmkO?1qu9mC3jG_awXVn>@eS+Wi{7F?{b|dC}0}IH}DM-8+Ly2 z+f8NPo^m2K)`j0${m#%SFRv)G;>&o8^3#cTi}kb9f7$%?&L0F4(Z5|vVX`VI>O}!P zlf#~odFnl~W%aTU#f2BCZ^I=N!l&(Y8@cWQ%hwC(uJ8Zzpeu%H2Xb36``})_e(x_` z0k*fGaGJ9GLKCl>c6Y%HW6LK!a;Y?P*GyYSEi{IdzPTq_37r<{ZM)Z{YN;-1c3@n+ zB<#=fwiT8^AqBh49}Zvkv21xX?*9Ju*eU*U-U0qMTD6;wk@;rXySI%U?=##FceL8~ zqFIxTFfukKVGh+x&SD{Qeyw*!Vv#=+`!mFQ16t=UVqP|(XlB!Oimocsq8VYKMO?9@ zBddI{doJ0SPLq2o{J4o~BLACCD&PE@D)8_L|LZ3bjFX_ovf zr)e}z#j=tL7Q|-K7y%`r0|MKdp{RAMcFqQtiUsW!Zgdjgm6_ZqA5NkAmjMsk}y*M66!mxbnDFj5{&F=QTS zS^gKA*ZTD4w~d{g8OZu&MRT>6$=qToyW+fHMgg2V=VBbGsNw}Ac#~;#*PhOy;AqJ9 
zz{YV}3ui-jyfY&Agr-aV^B5|xea$p<^QZDB>AKT3UUIlr}K;PJ?Jvda3c%(&d1v?8QtbFlypQN_)h z-CxxET0iVG9M5_3e~u;ow}1Z67G@s02De6g?lTCLGGw@1btt%^`q0U&)mXUrEmDDs z4);5Ykt8P?c}?S%Fy}pZ%dU-CFSV7a_(O<~`BN9+ditQ=N;U&&GWE_x_nj?_6_{zQ z$LcfbB%t>gs>#CYU+tTnH7Qji7Z!)ViZf4R1dFqbq`n)Z_4r}i^*s6=+oh;bEodRF zXpHqqSz;P^^N8(IiqFyYpRN~vXH|`tas8N0f<#2>F7fMQj#Q^PbV7^J9CeG9XZEOP zAR{#>MW%_3#Rf^?P;r;mD$-eOOcQgC{Vf-ros%;~{PZVt-Oo@Tv!y%g)GjImwJSC$ zvl8nJyNcGDjPV&pn(OKyQ4%Rs7O|6RI*_#rOA)!oydvsHGWDdO2Xf{47*3#hpqLvf zB0!aKb>#2oBvz>n^DNo}RfMaPi$a54%38s6ZLQW-0sGI-fxD0tnoS7-7nQZGQ-Hj> z$W(svA2pN704x+&yN3!ctPfjDr;*GfgEdxY?V<-43GMmf(Rw@|H*s&!JpUVWmmN0i zd!netBbD_M>SYT87W3e41TXXZ-TMQNkCjw7REF(bcR#oInf?joe&fWu1-_-%(`U}O zbK%{4fzG%yxL~;Y{4Ami4SXhDW|!QohHk>S2Jtbx?0i0COs?qDBcZ1bmIsYF7-P5Y zh|1orepO7{Ytz;r5O@)IhEvI}WhM1TAf~W}a)F?bV5ob_6g-;ect^6lbsdaKOHVlSQ`4*>dm9C>SQ_+ zqsF@d_}ny@UU-Tn!S(i+a7efx+GTPVe%5;V7;TGy=gDxCA5W1GEcQfpCz(vvcY{V5 zk0MJ^>s891;w1b777ynhTzZ5D$;&It9L}HfB!2)mO+xY zF1)I9x3a$sbkE1l4q@m4>_RS(VcZv;U@XUl6%=FacEqL^BEDs<#SBc|m2p5C=mtZ#McA}t z=ptHvV;98xOHlB=Par{;Vh*bI=}^#G(ns&GX6<=uv80AtXp>j%VI*~=7Fur&KH^=e za(9RS=FPgw&uY}_by-0t@{iWR4QHiti>+3?-kSwhK)D7{Vz?Hij@2!8K)W`Y_JaUf z1>JaBwLgq}K@V$wyE0Txz;)lMLNKutwE-#Zu61wUW``{3l5UB_nPO^^3D8r!4M>Nhy*QV>4%t=*Yn zI--$3Xz9d4PvZu)PqM)Fx~vdHv#x)m9lTDV+q7jJf&nXZ$ZFcA*ClY`ZhH0}j3?i* z2fLf6{sjv!@TXB+ssIbw_JQsipvf&bFQATfym;#gm&;P}GJo_0{TJ-2Sg<%YpIK}= zGE`#<8xjWp;*;=M56Medp5R zDx{-iU<%LbmiRe0bjPDlXIhvF{vbJN;-HAN>1Znl*x<*mEN)+A`ANFI<}n!rMj9$J zI^JrE9_0s{9r8cK#I6rK8_-htmtiY=dV!kCi&e>;&N0( zbpir4qyVK9GA=kLghi2ecy4{S`e;pMYxwkAn>GZ-4bf8241jUq#Xx^hA5*#1L5OF>vO@$ufh+p0L>~7YF7kc!9_4wVW2k>I;Vzc=uZ@1X3CTwF{b+4)+!@{-=d1|j4;4_ zkNJ<5r(1cu&*vzl4}dF{w_32!M<=x~z7Y-l>S%Uc2al&w;2oC}%Ryu;^l!Bdh=vJM z$@x6rD3F>1Hv;GZdqUoJ5{T$$szKm&k=&~QbU-`G+^2tRdTmNY+xhk9WZM}6ss`|t z1?u^&x#Rf^q+Tji!d|5gx4~vB{GD~MQlbVCB=$$f#{Cah$h(zOtd@Pbmvd#OiN~SX zMJ8K)R8DNQiq-Z!c)|yDAB~tgE0LS)c|gdxP9G_o>SIqRCnnz^*CSN!Hhg?W`~Ew5 zbYhzVuTK9?P)_@pA$?gKtUn}L&&s{^6fSt;bBDp|K?Tj#9K8rU3+DVKYJteFH!*KY 
z#(WA6RxE&o5w`!YMT3z}51%9KyQ!MgY}rAlJ`pl(C_kWBFFzhHZyt>iT<-pXzO9!Q zBMFM*{|$|I7n3*dnU~2+ZWt);GQ#J#(C*PFyvJ?Q$*%R#n=;h=)=&QVJxJi60D8;! z+>pZlD#i7O(&&}>0S>ow2EEBOX6H@Z3L$&5Ul-b8{8x|ppJ5?WOsujeRqC0bE`06o7~sEehH|O9!wFGeO?!QUF6qVoMII3Rb4jkdodVx)cs34 zrsgvqLgJ+qDMVnMsMCZzwH$f@rRGi9$7mxS^9R~_zh~v!G#TO$wk}3FBu5gL*S_hP z34=R*mw7Mxiq`(hw|v4MyP$Gqyd*x=8LEjAu@=*`>j9KeACj@_OeWRKLJDXIZV+p8 z@V1DXWIvDtL#ZPcCoGoBnarLW=uC|1C11g>A2EPqaXTbL?1Cm%-nj9gw~braYs}xi z?cHwDorsp)rCJfpm-4&lk&q|ezFytiW_>CAR@U*w`}3345yr&9n?GzQe7B2cxoqhy z#?}wDc^p zSbYdP0(#-dpvz?)OH?A>z}bog1g@=0(-qrXx+fetO>SOSyQKI@ znNjvIXSSRvCf5Cpufe7#hAvU+aSM4+1M)X*#abpBM^j%Ba%!r>jE)Ag*b5c>&9i?b1Rz zBI|}x#`t>^kR^NKmLUHT&j{e(v2;%ebM)T`Y`VP!l@O$~HBu1ME}eG9nO4y3V75A% zCYRhfcq^Q!CmA@h0nvZ9)#~+FU-C3Xo&A#l>~;si!C`NrZl(w=-|)u6GZWS__KGG1 zM9PJ&uu@i5&<8G+aVxvE&=5pd?HxjyA5!rFbLJbZw%)YFp^P~+V#K-lQ=&hs{^8ea zry^c#2aNKC@7S|r%yR93zIpe5wf6dIHeuXnxy-yQPGHRbYPkn%$r&sQ0hDj;Li zKEb@mv28%gNUJ4!rc1|2k@LIzyA0*~3|zt^6-OXR4E=k!XQa%#!!MSffUg&o{e9$& zo8^<@&Ab@6P)nv2m0R>CPFYDStURg8xiueDy*nG9%@5q-b&5}n^Xe{WnpXnW?Z)43 z-VzOQib{5CJi*JIRd|Gvo7C{Q%Se+s*e7*bypO^m(*N6<@qhokAqHUdtd2|r*+|dg z5Oa2BmxDa8=h-QIWN)JLd(H zi*FP2*W;KSAh@si*Z7bs zI}8oQh(6Nuu!2?Ion9TAPj_ks!7)Kx^nnvf!bN&;(cu11!K`+MXVOoIk=g;6;?tNU zc2(qC&kky|a6n`iW7+t^81B?U(hA57=Z<`1CRcA3p-bVLYxtM#n8PEwFtT)9wX>QF z37>y9P(y?u%w@V1LKqWfQLg{XZC;F~z0g>_o2H*_@oaFi`R!TdmK@};(9UOM>gHK= z5dq7!Gy8Wjw`g>2WZqxB^WJTB)c*)R-R;0au?gWu_zd>Tk){w#M+SI5c&cFWD#jb*Xf1NgiT-YwhY zT~9bRc(;W)7BAOUBtRwS##s$-)+;2(VjlcH#%Ia0uq(tQqU^#ItA2B<9hI&0-Qu)g z1}e^Rfh}j(X}*My|AJdIBbC$cyadhP4UjNk*?w)J!fy4?jew=*p;{#GNNNS8A8Jx@Iwxq3Meu}RFt7rXli-1JX ze)zQ6--=v+{8m;l_>(RMDSQy&K!e5Mhu8|mjudzqD^SCO+riYjPK31kTPvKb_EcUZ zQu{CkpM5|esi9K8PVZkl_GH0in=JOP&z~7+%P6t$u^uy>WRr&hD2FQ2PmkZxycoJ+ zsdlmw4SXnE+IrW-D-W;7FSri@@bFs?GH(!x`{Y&4M;_ux#-IO_5H7O7&!*QpOQjUxrKQeC0CQR!E$o@h@ zh~py_=qod(@`lnZU@9`Nm=dfop(>rY?V)I2BcEI8j*hi)wqY8T5A5d!PzaZ(i3+C! 
zjoy{YS!qebfQBToZETvbl%$gG1Ani7mS^|`bV@)W1Me;I0!KXc3S}L~343l4lalPvxJh!M0Xfg&XssmIT z)Hbr*Ej(^=BvrR#vaAI1bC^1q<}Or5kAMbU_k7Izb(6EK-n4Mt-|I75L8tW1md4$9 zmvqoQP6MRQ*BmWuhU#}ltI-Y4k0G%>zn4SQa4#uC*^pB-<1XJwx~?Oi*q&;k~K zz zk6@5QUKw;QjG-<0e_8+8OEV;JC2(8e8P? zMhJs&Gw-QoH99~x$>$d1=vU#lNZUm@GU8w>$?PN@6;5b24(6)EseGj~lp_{u+@kzK ztV9l>76?#YI877M%N}k@NsYi!r9>?$JT!3ts#c?=$k#rILf$1g`^&)~V`;xK&Em~u zk~%ybnb6K`V(}F+SNv*t`dzN_3wM3=aSjC>kV9hE5HU=x)i)}b(vguJWIjIF1=Vlr zSQ)n#qOOC&KR&NJWQj?4gQ=7-yiFt*z1$69f(k+Q*0r3EU$f#^kN$pke$%^?XAaBq zk580#`b1TJSnL04%3_uHooUZ!PEp8R37#Eye_C1{aP=pUnSmk#OuvCGXAR*admfh8 zrD~~!&nECEmya98lMaf|1%My$EY?5c$A2%cKXN2iA<{5&rNn8=kk9#@>g1_OkV~y_+;l~)iXiyg?= zsBGX|z{Y{8KhSl^CO);{)hyxnyvxb?Ov_ds@Ru9G;rs>$F@aD*3ubfoa(mJg4x5N& zS!`-3H9R<*sUOTbR&fnKG?-YIX{lZwBvr)CAaky(nV1fryb5$ixHHr?D)KtG!`8Hs zoC0DM#9?>rnjAj2y`})I-KbujgZU^Tp5d?l!`$79{}@tv!x%zr(rM;>T1Z`m zM2^>KXHJ^Yji;5dv$-DRVZv{Xg82@7T!AcH2?5G^)81Q@Q96%aJ&AbMI9+2df;R3 z6svROvHF4*?YMW+z$tbsQns;rO|2MtDoJPvWF1PkB7k-PRWBud+1wS6f9^h1zk6-y zb1Xu3P0@s15Y-Uzh3L`~iYh!K>L`h1&G_li39_|}WyZpGBe%d%=hb%3H|&V`uC~~} zs7&oBV_wG1vBM$JJb2P^_Af-N#OPP=5s?XbrmXjU{x!&3EXg*%=`jCALld;jJbNGK zA;oNacY@T2*dCpuriEk~%XcyrloY$JXzW@&V>uKLRHpykXb6*~lMPgC8*tCx5`Tlp z)jUC+*N^{PG5~bP5p354{DM{QzfYsW-^}USaz{8;5p_4{y5o7`HHj^jHx)|vFfsxG zE09eDTPVs@GSpT>OUmB} zJSpLr!%sxQtyovR&e9MXX29J3U0F0T3>0?EnY8=Spal_tnJeX!%E&a9I>CNzz-M9; zlDPLEJ7EhLzdbbtwluzzQPtPE5#k7~9)AQQ{LW6#q(0pSkq1C(p`%}+n9QJ@l(AEh z7HhN@;K;d_>I1X^^$Xm-ix#nG)Go%)qN{tT%KvJhLYG(~SC@6`kjp>RKl5d65E1h| zUdzhItYPjKuZ7BRO!xY90Whun`H$8w+>YsnV&Dk8apzI$1_U$bI?v2bc{+qr+VXjBhb2({1&Z4afpoZc7m zJIriXt*P;13z_`7SyLxx(= zIKOLk=zhH0Rj!jjr^+M^#085Ero3y#f7(=?WtLJOz1TXhqqkB#qDzBGeL{KpiWK|) zS9M4a{U|d>f=3m-=I{CVicAbIy zSbyy10^&SD-`GD|>ni=Z8Rn>fYd>^j$!tRDXP-W~U}~7sc(z})+YyoGYHALcn}qk> z>RW#TJuP#4krOaf>4A<)I#q{R*yD+z!;GVxwattVdFt)>`+u}@Mx^RzlV8Ir_cFhw z)ufk%grC-Fk398>f?nhA6Hxa|+0<1LuS%$keS85tdK5f8O5--nPXb!>?k$jDzyDjR z%p4tCb0%m1UzQql@6g}UT?nO*%!zwxu~ZmoGkefty-ZWYy*IE4>ZFY=t}-;qegw<4 
zAE9E~=?5Xwbh|58TdW^`8=crNqD8l?EGvKZ>?0zYUnRTMM*dQGqGHP&y!iv{XlB~Kq>dKCk_FG&}obLE?B?;*hxz9 z=Al*aA=w(~|1@y}v4A1&_Z%6u**kq}J#PJqzOP5=<(r>eCMZ0HbaoFt^YsMZJ#cD5 zz2V%$O8}l?cpc;gVDbgh>M9WzOzs5Lah!`804MI zJO?=Zwvk04pNV?o!%T0LWsmbv4Ha~+92^{sG8HvYmm9z^P=+^D-FR~wLCuf;({aMG zjdM`eebElZJUlX)^{76rtWN1)7@*040OkLjO{4rS(Cp2eoKCZf)ud3#J9Hi<84$p2FMM+;V%S`Z@AbT+4+4|8>Eam5L=rJN`V%ZGBUT9|bc-aKC&U43x!b zN$Bai;kh5u9wScoCB9Egb<8)!OKBP6L(;ejFuq zd4Hjuq*V#GuP{B`Ayea(??EzHmFpe(w39@@Vd2|h_j=jZh9pYt=Pk#xu> zrP%&VyX-jBQ#*h(bRDjxBj6tNZx^dbl@+v5V@z@u&D-p$%$H2+9{c^wbSo#&O5{ke zM}LQAoM7-4OW_}zaL$`sP+-sfGVL1aUB@z@S;P{>o8sX9)rqg(7W7Dyr}AnUnr4&X zE21Aif=shJNk9m#G3+7>xx*KSx#gCxWwe~DgX<~f}EvK&l2|7YODt8_m zkq%R!?F*mT4DaHsFD)Y@NGbb|Q0+wu&JYxxcWlHS_{;JD(uZs5@@Qvlpmm9-oJSFRI{(Fs< z@t6!>eEe2Md4s6*RLin#3MH?XI!8Hmw9`LrWh_OH)+Z4XdM8G+HoSU9NG)CU;s}hI zCdqH^oj4MTrmU^yC(5H9bAuAj08i$Wht<4sK0^>XTje*ks;I?mz}-ON)Bpw?XG+KSbS^6$(;+QWA{K zvaF^&VIeBi>t*br=c)j;JLW8N-`319$8_ooMJ`<36Uvo$0^01_BfEc~Mfg1f=ES96 zpgVc*QTT}S(&3+w__ytN6QrY`LI%^I2{F1v^8~KFE?O9SZ~$-Rw#E<%{xHgOK_d0ZPtn`4TXy=o+J7S! 
zN4Mk73(l5K^Xrl}b~;%Jkz?joeBw-!=R+*#<>aR5Z;!9ZzG5-MLQv6(exxXd=6BX| zAy@nm8QV&G$RJOAt6D7K`J3SHNMyp#-SZ3RDO>kRry>=^KAplSTG8@`usf@Ndl~D&6=oTVpH90*=4fWY+#piyBgC5ZCo8bAxTJiitq`9!M2Ku^WgOKR5eA`VA;pHqN#13a{{0m>Jnq1!S*0x&<7quN4t zocp-ws(35$hyKYf>*f~&T{iON%NnxQH0x+$X!-taYx%yC_hRl9W&*>!QuB}-0-G5v zb4Xa*nA_t-6g$LK?~tbqp`$vmOh`TS2bQxwkr)9|z`Vj=9=n#c0oBrKbV@X19MmNC z+|V{n!Mi1-_DoUDdaM8YB2c-u)9~JEvw#cRgI;%e1(6MXWS06JE@%Hjv(+@fP zJg97klN-b(q2j{UpS@Q-;Km}s|EI9bW}-eVpx$D2$rftTS##5dOrIZT3;dd&ehF~X z3MeqTy5;-Q!7XEB0m)dm*ivo4Rk&c6TDmN2yLB;b|1I|;=i(+pB{ z^w}bC-OK2;TO$g`%vNqni?vLD5S9UL_w=cD*<7#tH3b2((QuRjhW{y3hu_Lmk;Jnf5C!k$cc(*s>q4T4kD_$km z9dWcUj;rT5D_y%UuG_Kh2KhaG_U8`Tf|1eBV;9iUU`<3~ZVcUA4=rWm5I+vrF!HKI zq5X`K+B=@tTAISlY;PV3yJbFqf#H!2z;vBo+0F)6P~@9OSg14x1c}E>)8kTl_sEseJ$HR)(`Q2{Cu$YGDMeKS%g1ZJPw{rkzA zTgnLuTvKb3{o{}=4hi)0!37xQcj5RelJnPo@8|EG-4|e`C^$9L_VN1eHSV$!lnX9* zY`Pv58D_7Gln!=H@7aruFT2g%y&yM*L(!X2f1JCvF<_a?LXY^9+bq=S?0L1(!+%uf;G_b_<96(cKg@oM$H9_~gF zKjmC*FvUaGeQjQsdsZSZ%ZO|yPnf-{dtvP>N9|$j`4M*_wag}lF^gLh>|R7+j!goE zRdHE(Yu&nXVxk4o!g9mn9-XM7dp|CDJa~;bkC@oC39o&BuS$-#f9F<~i_YVhqvSsx zx$#@Q^2~wjGXw7NTeO{GCitREJfTNDxVg(EzMzi|TJ}DMy^_2J$WY$9KF_$TX7(*` z6e;f@EO9R1*J2Y^K)mBq=AWk+ue>?9>H(*CO8-IfDy{$G*AG?2R#^@6+32l^)T>!> zPFg*39=ZvY`4T$z(zZ4tWqlXMhrJa}L`96;83NQPN|N&QwZ}X2T%Jg?2y(MxT)p z52W5`*hjvEn%1q)SxJu)p1Vyop>=x63Gu@=Mok-n1Mt=CIHj$=WD+;P0`I{_l4#>= zvQs^KEmrk)w{KoSnnjp=(^_%8Gi2@!(k3f|<89TSn0X@qO1NMGj>Qt@$!bnpzmW!h z9RGd88~+tY*Piy>Q;+0Xp`Tn!)<)Ke!uNybWW+1FRqT@!-aKKPdRura4jZLuoKKspSZhXG|UE2AH{G3R_g)A?~vne6)byZ?@IjF*WG9+N4j| zLA(q=w}usl2)II4Ge;lGkqdc<9z9SLL&cn{53z|0x*QTm*iR2Ihu0cMwurhD8;O?^b5Rdy)oe|^m7uutwcPR4hojumvAvLu%D7|D zrpgKzQDA(W6dRRCtAA->DO2P;!b(+M7LGFBWxS~sq)TV;L4%$Ex}aDmz6&jx<1{t*(*Pn z5ILI7cTV5kpeNT2reZ~cl-gyYciT0>dORp~qIbDZKDqI{=L{otBgrwt6Za8_kh{UuqjCUy~cJWVih( zkUdve8Ggf;$qSV{O`7f-J+a#4!#brb}tZDkM#7@p~xg})*&8JxF*w-0Coxagey-Sln z7Sm4pcNLm6{unaY$#1l?ZKN9qmlE&U>RCXYHmV8k=c|MjYFUe|i77L@A z59Q-_(fW!y(F?0GY-59)HFJr4map3anBQ5tSa!?TyUW4dDEUv$%{%^UumX3Va>naD 
za69y4M=t&FT>2{Ds*p-;>Ed4EVved>TP#NLu9@yuJVLPmg&fAy;Sqv*ImZ+-rL}NW zvaD2P2mT2E)An6#jHJR=I%G993&8WcbR9iTrD_9xN4ovhawi=z@e=#v8IQF({<5y; zbCE;pNL%G-m?QBvc0W;3z1rhx@Ry#A)=R7^u} zbll51*`KY+`2GU9g(R+z&31bqjXn^_b3y+A@-kR^_gRQcSiOok3pq7 zLIw}RGxgDeQCTbF3ibnv-RJIfpB3|oTI)7crwO;608G*Bv7l>1bPurdSJHNpy>Xz-LwsS>^ z5Q`RLL_b*^zp$tV5TjssapAA5Vb}A&;k9y0mJPm`ab8#y@fO|Q7d&-Y=Sq0o)e9F| z%>D@5(q~y@HPr>_`(CNggw@D0rkRK(_zI5U`w1Aynw10<4gd%M^z|i@e%(AXf~hz5 zkzuIO|K9k+mFTh0vOSu)mZ{NyIJ`@yreThwb)*JI%&c}0X zCV3-TYX>NOvW5OfwZmijQW>Rg?e+<~=MUsLwXC@`tCzTS>9y@`KMGHQZXb3=T5rNt z?gYb5kBmHT!c+Iru}qEr>$p^6j?T68Gg0Fs$w!R0?iN^%oRxGhPgNbQR=fFK;BPM^ zoi@_-zkV>UK@>JR66*aT=JfY-hsUh>YxDPTDCxYO{NMi3IuB~T$#tWE(UY!cxU5Co z-!b9ZtKEPebJaP}Y=4`bn6+F^t_(dM=hIg0BR6gAcgg^}0!6wvgs zS&&t@Q+%w$vm&X}O=-Drr(s9wQTMTK{O7z}yguE{G7OqZ%}4 zl>M!HiS2j^eYdBMdGO(|XQ!-$P$oBPc65~Id~duTWFnx``fK=0HgIzvCB2TVVEMkZf^d2e=V~je zkA-3M$ki&F&%NBKNRU|v&=!Ca-*t=e+hCz^d7S8u3P;J8&VsZ{cZKDJ?>5y|n*0A~ zS#V|P(oCl*E@ah<(5WR#B>@B}9kA3`Ml-fhVvyGl0Q{r@v=I)qL9*JdjZt{sNV|Gp zc2|g!(8(1?J^1u5Bk%@u7_abcM0bTDIe7=WjOBpwLSMgaJ@{A|j+<%b&e?HlU!)Q?UE}Cvm-c*%G!)vtw@!y^V<1FhYcs|@FpZ|UUND_1b)zmmTH z{bYe}uFvl!w96AQF|&(MM-iPYq8qO9@?GLD^CBTCj=|hy2%Sx>M?O=;=vENl*VpNJ z_agUiF(H|y9~t74e-q40&QdN^nROGMHfH>f+TJ}Zrv2?7&kk)n8Ki?w!*&Q!NFvkB zwr$#ugLF_d5~7eKMoqJjicYg_n+Od;(qW`hs9Bw+**2NzFe#d5hE7vjQ_ZxR!}7cP ze4gjJp5H&u_5EJg_j7Ho+EixNdaw8UzF+t2F#bk5h>VhIiK*%1&P@YdmTj9$S_aPv7o9C~vu+I;Bw^j^E4l~W#XnBN5SqP-_W!95}M%kBN0 z-+FG1zgQ8$d02nGdg}63lh?UYQg0tXu56pNFZs{4Jq_Zkv46UaSmq~Ry;vHZ(A$)E z?V#^bmyk9Yq<8r~%1D=mFTcu&fA#zhw}+WAOH}aUwDlrk@QZIW$77 zYgzct8rj|^fFGAl7vFOM_Qj^Nfk34n?@=IH)rE~iJtaDB#Q?qvk#2zmk=Qn2Yhl7Dgjrjn#jhpL zg;fiv6yh~nM6b$x^e87rHgM5yMZ@qKm*Wh<)b57RcB?MV=#Al&DIMWgKk$*ugCn;N z%zU?dGv4y4(A36NAGbgsY>euElV%RO=Ucwb{7Y%R_2#bA4=>a$Jy`?0fJ060%0+7N z9>@9KI?<3fc6wIBidURbm6+sSDBEguPc4cUurC0EngAG+Gup4G%Nal5CrPwlnjDvm z1<*{t6Hz%b(tLWQJ#2b3-L4F4tC7pp3<0;Ht@$U(5NykO+n{$~0K2}3fFcg{lm zyvpZ%m%fe%n_iD5d{jBOH`Ts*4Z;l@QW<46X8_#ed5joQ<=c>yY4^H=z8jn)kb7Df 
zq>n%Te)@>qzJG%3I(ZPh>nqvTQ28b1)UfA>3%?uuHLVS$GrA9uZs;R4HUyk0aB1*4 zq=Y})D#+xr0JsC6^>3FH#Xx9hy=-N?EX;<$gGku9Pp4_W5y^1ub zk_1sP!4{W-q4$93;BbC)iFQqul{r+Lc?PN?esTkS`V>1PCi^(F_D{2Df2t_HSXWXjx7kpevcCYCw zy{!e)LZVW}68%_65yio5)JN-ICEZyCgM~OIuMbrNC5^fnPr*7RLV_e_Ie}GcXb!yo zyAq_{)mJU*(b%mfj##w7PWU0&1{-E@! z7_ea;Cl`UEb!c-ISt;khJ5`+F)rR2`V*D*tKqScxkts z2>J}u;U^k)`J$5>k%OHe(Azw+YCyfkr@0J5FOX{T=h3*3v(Fhmy_USNL6$BbBOn z&@hdKA>YmHdbI|PmlOi=tfgwC+Ku+{sE(l=kfKQ}f7!7RZGaq>%^BcaxdG^P@hDO+ z9%DC<2WKM!*E%HhkF8x5&+gy*bk1y!?Thht33tkuC98n5gQ&CgDR~1_JNy6>k)P(s zIEmz+!f4XtxVNwpnLBIBNVDI_SLI>~}XL!AK zDr+Oz6g|XE@Vov%p?@K*n@us2`SM$wbmd`H7Z3YP4xT$bcyR52I9-41!sF^$>eVK! zMHTX8Eij?%1=iumg0sO*-oZ1T=C>SO{Pg1^;BaNZjqvv43L^Hbow7u(`KYE@+}byE zd{IqTnNxv zy@)Az|BmlCFj-I2R5Tqqw3)f3aPoxy+p1x;W|)^zwodD*n})UN)4POL0oiAD-fX*w zCx4S@L&BjB44EA0muEv+t?=4)WQpT2L#()W>Txq~-cKtY?j8-*$8>9`R~>q3%NS<8 zNQr&r-i5kb1+uvPuPDVu^(pvRB}(H4toQu@C>G-*wTCat(H+8g7o1klZxEK$<5Ul7 zbwSHfBYc0e$s9X6d z(ZSh0QFn^YoQ`^a^Nbn6J!`&v-Ua41%aap*X4y@d`FmdP@U~gdiW=%_*!H;jNHxs{ z2_eO`+a|#N97XCs3o2uu>hvwH{6kr}B01fjEORxDaiC7GuOhXKq-%gLucqcG9QgGSP1zTuh7vQDsO`ZPxvdZ+Lq`&#Qz7WN46+1T%Y|A4%fTB z?MVIR4I}+9jnZ_Y$vVbA1TXPn-w}3ag4T(cE-Ubc5!+X>JYs~LX>YoJ+0C@eEC)_b zBG2D2QOaHbBsYeiruj&m=ozYFm;rC1hL^V=Enx6oMpqrHKj1YzJ$9|o+VbQ1{%1GDD7rxU5fD1zzPtFjF6Qpnot=d=xj{G zR(I9F7B(%8%}CQ0(|TU(*WBK}6yIa#Ff%!)Z|SMA5hh5wfpG{lg;GWy&`cmBY3%b% z&0YX3HsZhI5$cU4$_bS|X)+Ii)8&BWSBaaqVwgJWv~Y8ot|ZsnV+p2H12dd})etO@=kmuCx#0 z0WjU>_`(=^`=<{v1%?6L4Q+NELv zJ}xp-<@&tQi@_H~o8G8(%0zy66Q`pIyoF8b(KNI04KV^s_!~jsza-6-pgQ5{|I+@f zK}vS(GbE{QY!tG_Tlrpiwp26~KdDyK*cmm_o+8DlT%+xCo09XShksWeGnS(0P{V!E z!48llg!E+dwH^vFVeZB8#hiVU&?m2BlzILdT!8`aO-@@!# z=M7lCW4s#hssrwAULex?6jcL7Js;xi!+kwR4X{HA0BKaZBOUpSLLbiE;?2B?c%eL= z6STj}E#M}c6G-iLwhc3RGHLWrR619frSlAW`uBH(-)`APY{$CJ6%W|pv`S*Oazpk5 zFh7N{7Rw??AFD5vy9a5Fz<|S!VcId0GR@Ou4x#DlF*~orK@o ztdv~vldZEW>=2oC`-#-wVnLr;a~sM0?BDI_&-LG3UdY%;!WUi>RdA>2oN&5~bWvRr zHIjnLGB93^_SHZinh=%B14~J`eHF|c;=vHSaLX<-_VlHd-t=Orc5&6$;@LHpS?6mz 
zPVjzGGWr?)n~lMor^#`+YVk_5o&(pKFBgyIvKH1o>s_^{WVS{;t%F~59k{q-DY9HR z#}C2=@UL`V7&eWSUqB`~2DBH&7~dcEq$lX}=3-JTo2r}BM3YXzJ+pC=npPjlwPo$m zr|)m}mo{DWqg{?fOhElQt-fq~b(Nh1ewb<%Ts6CF3hRUihbCQ0R_^6meWM*Csu;xs z)oo!Gy0uu=C7o-gZj~~j_Pc=%7@J_^3o9%)@G{$g$}$auj+iv;G<`QSQC_9Nu&l&; zN^+72ShQ9~=X4v7d%ydVl;>Q@UBPaakSWpH?Zi8~HSpSwNG9c-LT8l3ez9swi;L{{ z)GpPz0@g>&!H+7cfp0NZCXdP2n1D?RW~Yvu14>&^`-WX=x5GEfc=1%K(&*ADu9-h` z{*VkF9w0|D>kkK45ex80B~lIiqc|z^b)=}^=z(O0Xjn-%D!KTIwJgopo(t1J(b`gEjiS+Nz zYN9@abD{=xQ+dh_X& zdn#aN2xQ9r1*fa#B1df|c*Z8%betB2i@A?0dwTYiB$jz*GM%e6yvw?^tQDV0>z;*b zwa?{mF4@uf8)w=?0zve3b2cb0UGaWptUPa|(sqq}#u*Q(R|-G&aVRpWApqm2DLre= zqhQhy#fN%%MmsBTvNVKYJK9rIV$C%BIuTj5_B^f)PKG&;!W3$S7z0e zs*FuG-fz4IUgH3w6jNt+M}1i~y1m<($$8LjGdN32WgUazKR7Dv=MODalIOnpnwU^1 z5TCNMpWJReS)$#EZqP=KDjKT)#w}b#OJA^^O?-?!6fKOrvXu*q6RQLB4z)J9wlV(J zpVmJ@LXjkfweB~<42xLT!AW?w_bd%dsf@@}@E4bHEUgA=rmdzw6G8@(eGh88HQ*_Jp zPbfiB|2kxQGsRP#=hQFRCw9pZja~*J+c-?Zj%eWe>DS^1wLUBwezPORiLP9as;8ty z66b>&i1}_~v)h6}bVatHap(z`FVs#xh>Qtw#(+gVm23QFVdm!E7Dm{H1QpT9j$j?Z z8|1gh{adZKD>vjl2}xrNO?ZDaQt#x^SbUy4ng(HCN_+a~owBq2t^7>|O8ZnGfdMf% zm|pl&dHrf_g1-2Urw8tH8yfs8G{QWKxEsug2Y;NC4nV;2>AMF#QH}SRfLZwQQ+FK2 zeY762n5%SYCgG>lyjUHgbqcZCCH7V~k9a?S4wiB1fzin!dz2C#$*7pQ>Ytm}jpPr< z)^fi`<36NN`Tk2BBM?}m9)Qmjh;?SBGW!bS=+Ld6HRPi>h-FPBhuh~N{mw|{TBkca zCTG?+_~snJ)i0^B<%pl8P}pyTDL~BvG(YSJcuek5CPShVdKKYMHloQdx)Y1`!D9>`%4@T~*`&Ls+?aE^c8IJ6?w3!3^ z)jDfzfTJ>5zSUV^bv~Ll{Yc&2Ubc;P%2TVEf13||VsebMKOM7+{CTkNAU`w*vQ>}8 z7|SgLw8^aL_->_#3d<(=E7AAkI1(vFkAy!(;& zDe&?=&nX4e_t6&d8n{QjGqZSM!oPJ3rHaM$M*USy7tDcGNU`cpUKg>~jr-3TosHpL z#n@n%cOeSg$1OrKW+-_ToCI#zu`^XlywyEuT@x`2o{!(>eSqyfiDEfR8yw$1Z2a(z zdD|+J*lXmtv{s))inTnnoK|Z$tEp@UF9d9lfU95{>er=yR3Nj1$^QIu8Erb!(sUQU zDju$5U-4x{-ja8v>LQe9ywmHBQ-X69Rw~(|l>EXCL*YKPg`|5ngHz?v^?oMk|hJ$P+TR&E_i_8D|FlMiiM^mf^I1Mi%E z%(u_m0X*&6pl3t-c*NHjnxQ6X(VJED%1@87%!7Ajc=atW`&n)?TGk}xgCsI=8I;mM z-Sz|N4g}pnS{#$NGMsJ3FCjj-T9B2!z@QJuqXC3ctCMaVen{{8LmGCd9hO_Y>9h z0}Jja8vN&e=hPbw+5H`lb#jG93|(mvQ4;oqbpjV6`yxW9KjUS=kHYg+G=>lmy1D!&q`@(twyc56 
zLscRdU35y8htVZXF}b6@N*Y7^OjkIOuw|K5XI*SzR0PStg9>A`BdXDE*`R&#cZ1(m z8YE+xhtBa<5;-x$5My&AfcqM15>(?VQ5#_&F~CVy+|&tJIr+?-K3DExtp_;(O&EoZ zJr-XhgFzWVVZITN4{N=67l+-+m1>&iPv+HOu?_-cvU%j0H@vgA{k5Qp_IuUW6+u?T zIXht($53dx*)LeXT#(d69m~HL2CW9@fRzKI(LQwyTlRD&A`@5Dc~;xa(jLreSH`I+R=x^;_6Ux>qs?bV!^Lnq0sYSPiJN zbc=yIjWeXtbig~Gem6MQ?Ut29`*r|;Jz6#5_wCLZ#?&Xa1{uXHf1bia&QuRTA7&J4A)u7|8G=lpI4 zmNFz=CubOkzO+bo7hX21mR3%D$-U$@9r|em^D>C2;vj)xg%t(0C1M^HqDY$su?T+ zjE6uNwNYDdJ4NYnkucEhds^OHwgb5f1xiQ&9CdMPq1n5ua{*gSrrk=62Q*=&2e1(n zfMAvqbDX|-gN4~9%LRJx)YnVExaGS`}|>^JIq ze044}pi2^~X##AMm@=dpY3Cm{Iow74<$E?~uUDcfLKrTs)RgCeyX?7OG>n4|h zei6P-B`Q#)Psz>%PrcVt-l!$WmAoj(7V(|bsyC*pO@*lr<2D8^E#2bp8kg3%}vcN)=0y599zSP=3ZhN)!g3eNETvndpjK#$&YeY2q2lu5q9eTa^cA_P9L)d zdkM%)?ieox^u{2w9s`KH9TXm&^mDWzG{tUNyn>FQ)rd87u%RkSswL@uCXB?9W4hm? z$iA_c2su-x&@fIX!}_?GHl!YiXFT=kFNC$=l@FWPPc#8H1Uw-W3Nn5>8rXubMX3Hq zFKv@{GCixulQ~qa9K)*>@CgBTf{NjXKf zg?)HtE;AHR@zPUH(@7UlEFE7bMK6_w>dWZ5pY2w@+ja2F!3ee8wT^)VV%d9vaLjHe zCugr||46f2GdvVOW~syUlha8$PmfxO+8)NX8*y=$P&dPrnDiN|R&T(>mjEF}TFwZ_ zlaT{qRy~&=F@vEote{z6`b<{Yv7@cez5)?9xCU=7m?!l3mb~>L`a>g z5fdG1DHbrW1lA@jE2Ye=WWm9+dpB9`ahxZ;?$C?2hMC>e z?*=Gkw^-pVuvY;GuwYfr@mqy@cco7?HMm|)j-1p#TR9}#mid@@ zOP&C5UZ6C`gyF-fZr0FZDL$~b2WfpO>`o|WDeYeISW7Y3y)XLIAlQ}erOey@@bj!Y zB=}lKovRwfeb64-f#RXwLzXPZKp&IXD(Qk972wkVdg!>CTpw)!luB?o1^6VQO;P&L zgE>ocl55sYNE2D8a$}C328T@;vpBz@=1Q8?g8dO&L?9VL8lCiYM^xV54cmq8r|6CY1?Di{I$_2Pvx<5kBEm1Oc^%}GuP^QsRXBXcLoX4sjR z7))V>y2hhY5+&v{^GoV79c&eq&(go0myYu1F+(f(E~pZXrMiD)ZC18tX5*q7tQVPa z3S#t#e~7ubH6|_dh&C{&lOjpf8F<~j1)`M+($bd{=F=dpiTz{)(OWlcRjt97*eiXj z<@5PTlQyl+->5oDcg{?y0;>^Tu2ar_u50s&5ASK}&`_f8^7BPv%{vPdwwHXJt^cH3 zo^KDoKjpApAT6XgX*UZ3^8p-Tc-OQ|R{9q9*>IeOO-NsBjM;%twpfdTjxj|sJSm~Y0S{S4zk0wV*%Q_&)02Uwb zGHvjSikF3UJe52<6GxpvNc^sYZBy6pSZ$KBu!>K@Upziy?(HYMDaFe+Ll{{#o5EeY z6v;f>Rl^`f4p@Gc+u)DobFL1Og#$)M8pf86R2v=su-i4m3_L6DwZJ0?z{!si@n|r{ z`=V=`lq$86n~8kd^d7)H9-AIC^NWyua_1>zW%LE*9`_BrrBh5428@@CF@ruJt3d(M$`qYe(JyZ4!KHV 
zLM&j~4ZcTYyJdTAIhUZM+j`pYc1jzYywEWFb`%_iJ3a+nY1-yCHM&bVms=mf_}fS~ zjU$^7e^vXNj*Jnvkl>t<5xyyY1uGG_o{qZQ2vgs!dKl3gldD-Ba!x{qbq8^irx+O; zx6ZI(&iF3!S}y&IgUV@g;n_oMNi+cG zfyW02h#sP7oNk|P16s;g(i0sWjWk|O>H;K2kBD~kFcwMZUyQSbHUJapX@B#IXovI6 z<<(?c{>sCf*X%*6XiC#sbMn!HgI`difMKF6>NM{%%WGAv$4lyWgRYVgVzn^vYwWc> zD`{>*#TH2NWDL17PocQnqjq23$HX_00Ty$2G|r2wj}TkI8XT zQ^ZK|sU@BQTgS59`Jr~RDG}Uh3W;(X@-?RoQ`yUrR#d2ozn1y=h$gc5PII=5G;C~b zEgvJQU0ViY}cMGtV3y$PGb55v((oI=075kYFvXgm>+sWOu{{-fOahW326$B4_2 zR~-6h|4`_cv~2aRh`x+RF^N*&{irm^@DC|?B3TgkBDO&3MF44{82R*Gb6&YJhAi+K ziDj86e~^2B&>4Z)nqwK7cHLF-k!=d-@==&>-^Ksn{sa<%F^~Q-FQ~;4-S!~c7>6?z zb`E;3&Wjh?J`3jj1JM9WXBz2}Gv>aA zl|@UXY-}#5lD~Mk3C!9E703io+>2;D=5P3hZssN_&14Yg$K>AEQLi<$WJZw~Fi>oy z7LFui!nz0S4wp$klYEtv_;xekSVdrU4+V*#dA+7g8AzO^&mc^ouy3{6nx7qd53jCn z?b6LzNmFiwieTeCL{JUeMOZ8c-};JKfe6lW8alZ~tPT3VC|+kHk3A;Y==_RcJyPDa%YI~^Od;od)F*&4!4$9SpigJHj!j&$bBIMtqSxA-5^Yk7tUohzI@FgT zFWLZK7jT=2Y;OaGXH@6)_SsY$qnejWrBn6&_LX5Ky3JHGqs5d{UQxlTl<)O+xLlod zw#atBgqRnl%1;3FRXb5Z?wYBmy9^8aMx2h1$)9EYGmD zc3c7ttD_kbwdoYi@wD7M%-9+$bsDcbN|?Ya@)wzfO5n(B85>|;IUF6XNM~5~_|c%= zO5_&y^RHy~xRI)Fp=P0WcKm_@qT<6aif@4tqf7$Rqf|HYbmqDeyqUcGZ6-OlLy13` z0V=9T4#$wzTRphuBdxm(jT?M%$al}m(6}o3WAT`m-Zvg$Hb$hSFo>UC!5+g<-)KBF zV5Wn6e~InVLoVcVI`m8Noe}`BoDW@5&Z*%}U*R-8ly8km;#f;!WwT{yLNh_4I?d*! 
zp0oBa@M)1?TldZpF43-FiquE-1?VB5kfLUMiyo>gT(^V{B#xNRWK@#PD7!I*kJ9b} z#Z~ziGb})9ubU5|HIn&0X0_cU8|FX_+o``n!XkATbAHm%Aztlqs8vaFo=aF<6fL3n z&AlSIaDAYq^on!W`Ot40QwbX8?lv)h~64@dE?WbZXbh8ai}eTXY8j@inlim=gR ziziT`c?^YLXR5?cQ(Ro7KeB`u8u>g$MKzr%HJWtjHX~KW?L%{#^5Qg4{=w3!4kCWd zE!-y(pct!t;;Z3+9#dR7`}$|Bm4&q!tLHTw-`5nVze%Ag^OPxnEBPCN-bNi-GLB9OfMVOUu&DQzq|DxJ3Gdq9{a*jfeitRPR<-{myi6f0jqTRFK|opt#3Cc%IYocz zikz4TNDnM6h4bYd72S3_%B~4xoJS9Y#Y&85$H?Q@Vh#K9<4`wowTKK;*WWT`HXfg+ zoR|^0)hNJ7dwOQ^K;fQse~am@ODel;nR(B?{rom`(_c!E#jDl?dUJO{LE_VNjA$aD zcS`FzHtqEd%*Q0~LHE}(`z!mGag39g{?d$@d!^xi#qrfharv33^5d!(`Tzj#|CwN;h+@KJB`&%}p8(cBO>58FwG?jEbU zTDq+N+aR2+xKOcgP!kz=@&4l-yvxk%uVewe?Wbv6rSDsBw`Y0DIsFQ_snDfX?L~QL z(C9^2uW`fvE0hU+Su$Iycyo!;R&)+MynEuKqat;FRv@Mgz#U}n#?^nQerPcPSB0MD zh>YUVt#Xc$ziunWj+NQc2o_tCL>?M76%X=S$@nTyX?~dmj?G|htFcE3|K)r3WY%v= z`o?+Sd{NnzcpF zuWKNNa)v)1Aldk`$+3twdQBz5(-DphiRU+3=Dqyk%oCkIaGKG=6RW^$pMZA%+ZX|Y zCLo(qe&Zx%%njdgMzzP$>$<$KuVKaCC(FhTLN6d8fzv2#a}XN<_MBs&K4XCK4OPv6 zy6l28{xZy%4d>dkCAC){^uD>P`?a~4V}$n(sxe5pf}pqv0i%ThxQQ<)pYG2;vF+FV zRHf78U~@Liu!EtjA>+xwLG`?XH>| zpe+w}(HDo!O^!JpC9XaEYzjkdYB*m)>U{2!zHrr&0-{=F6{ey{V43pbe&Yb?y#?E1|T0Z~s{UfLq#RayMagAwmp5RtWghzd3f5aY) z{|d^alOgQ6^_11Jz2kN6_*&^R5YaB8m9SiZ){v0GcO{SO_ySP2r1}KqYzM8=&hhm! z-=97{I>>qObafqyPFk890bKTKt4|aL>_7NY&)aQ8tn(tcY+a(kB|n6ceV#qy#r{ zq7w?2LAtM(uogq#^{^ha)ti<{Y%(IgIeeiTF-LY`hcffm=J~bn4ox@div3ddK3zLb z2C-aokl`h_BcyfFzuMsyU5T0ob-ccia3I<;=9XezLbz~N=HO#)4G%72gzsZn6^IM6 zk+(1MyXQy;63e{gVLt{{Zk9sz8y5|5D7eni@wJqR6xi1Y2Nb`HN)7Rq8lkV!HsdkH zDHFnCtg@;QLp{2upu9j_ye^w%BpgH7cb>r{y4f`Bir(nFJ5BnNy^tFo_xI9WgIv|= zV#WKrPsM`$>4gFrWEV4bN9WR;14@I%X7@wGI@=e>5?eUavcsR#waIdQnqCadZiQQ^So#EdnA z?#OFiOIR$QRuV7j11b-!FW!(QCOW)_@di*!nAY?VboI)6W6Z`@$>EV^S0oI`!q+)H z4aA0gK7>IaepF+YKO8p2C5?`Y`AslKw~#P~dK8RH%84HSv)?XyuV%csiyS(EP77qd znO7xWDnT<-34RWAACGevYQ8?f<-P^+-`XYHUuu_#v*;CicjiEQ8M&$uww5r~24(y` zDB(>%`x8^ai4`7;EaWY@CVd24YvwAz?Xo0ZXv^&Xdt!ntC+5sncn7HxX@gErH|wH; z5}8Xa56z9a-`m{V3<(qi_%$vAf22ob50v)Aj0}~uQLxFS9J&f*tl4>tdQqRG`fu=! 
zDmqNa^{~En^5J&B%yLrt)Y7mUy?#Uiynnmr%%=saUUxS{g*s++SX@#`jF|Kq!lwK_d|GJG>?Yi^VqU zIq_|d?fDw78Y!B>Fir$y=o6&bsrD=!MJ|7ueO|uUC*h-4$R@sjb=g^>v@>lp{PUB} zGA1gi>r`IAPIfX&52u2x8y+d($c>p(wgjn8%e^on0eZJgeOgL{FRCT!g|E2X&XN!O zQ$S4tQqy+>%Q7Q~ox=(6kz-uSMeCgw51`Lf98#zy8B0;LTlY6ScYndN{!r$X%S<_T z+|h9oTu541LuzO?UrAPbrD?XJ8mDca(RUsVsl@|2-MSbN6eMpz^e0jDT zm;5PmLxvq!TcQF^BRqbf-xKWT+|6Zqg$I#J72VOuWgX7j03M|ya)xj#vI9$~2pe~V zHkCiCwHs}#J0=TD5i#FujTT)!Qt9fV`egB^%(?7M zFaE@}QmTTeqHe3!w)@S^pTk~$W}gwV*2YOc@f5@C9Q{Ma7Ty>n4SPTccmei40TYmR&VdX?U^;cA47dMrq%rg`@%r^!97HEM$W zlr`^oAXagsqu|`_VAxiUXn2QpLtK`MeD@(I?62j{e|R)-i1MwaKYTi+KM{_f4Arb7 z#Y1rWhw4hUf)rmZg|&CGy%+t8pDr0pT~xj&z{xZzPY1dY)5|r(+kCILJ@Z>~e1tL} zv@^ugY5+Y+s%T#cZpWrf2SJo$_C01=b(G*QzxXuX5x+798hezjMuG)ezX$oF#5CY;NVU=_X`cX!-SpbZqbL)rDZLvjBm>g&jfM)?l^ED{9Ndu zr+Nst&16Fk!j`x#{6>J1jl|9K+oS|1ZJ zE?yv8TAIf$j(cuu`a~Q`_+|E#?k}F~MQMn0PG_{3(ZfL>}3njbRhUuE*<{^){s}f1nYd( zg?$xXDvH@zEwnH{<6Y*92%Lj>%spB;v53bw{(_geV%3s&9Q~cJ4-Etj1(0YKxvDJ8 zOS|Eknup&w#UCDq?Q5tNy*@NKZDF-+VNiF+(xPSD0mjG*cJ_7LwBu8d&p4XNs+IgH z3ED9-E+AzVcv=}x?u7O2kk(mC(}be)?Y5X{X!-_DjxH2ux|sYd@oV04Mxu1D za5k)ry;4~LqzC==MHk#AU$ZT|eM;CtSAatggP0VA$?3|p{$-DG$&TMu#$@P{NUE(7 z8;8ctuNJW9UL~fXCJJ}A&(I*4oABwDH!@0jJ*S(uzq#PtAW`;n1G0J;@7Xf|E6-`k zJv6_h#JBIgelN(yjW1S7=XDXPcmb{UGcl*8Z_2uB!KFvJ3D0s17glyG2gmxAU}S%? 
z%>G}~)`i7!;Huven(I(Kzn`6+R}A2+{x@C6e-fh|``Nm7yiO`1g-R?U-;hmxcYp42 z)ttRT{TLnl)z)}SRl}PDo$Rcxwy_B23zu=RdN;l(`u|aUnKB77V0ZH&`!!aS zs0)*UhccF+bH~O?XPZ=@)L*n0Uc+d~XehUJ-43OFJaUlqaY((Q0I0;ai-xLbQ^7r# zicIe2no(U88hYgdmiOr&cTg-PCc34B8PRSYK!V6aNVVgRV_Fl_5Zs3`?bk2jzUMiR z-!#SnY3 znL&?Zi!$4E?jKZZ!Sr`sjC`|oe>AbMpG-`J333=G0mXMBZ%4&7+(CNi7d*(tcMED5 zE6ZKEJ0oZT$o3KiGJav=tD5c%3#0fw=(i-Zi-NN4~hjA2}t0fMFSYVv!O}8h?UBo4y7gL~U2lF7sV+ z54knZkoEfq%GwUejNQr@DaQ)R$Ajt-!>lenEk|Ui+uuk528t`afxlvJ2!8?g?M0?U zaUBV)M+yK((&1rthDv*093nLvE7`vrj4Wru<|nhS1Q?bDlqsk-i5Wo0DxK^;2gv1ZBwDu$V>lTahz?TQcAq4Pde6(WqBcC|2aUwW7{flp>U<}P*`{``; znbglNd8j&p#~8Yz)93Urpag}>c+d2=;q$#i11-_R!}(SN#H2jTXj@6iRjIfJ=9k@8 zuNX10#I%@z0pCRDcOK+v2`Rs5f-qh=5=_VE2iK<9|NcRxMQoTpwTz=;T47kO>S8_1 zam8mc5er4uMP980`!hHLmRttXi$#YOW^+Zjw1`@TKk2vXT7IAWm%l$sZgb_iwk6|G^;RuG5*?e)NwI|F^ZPiokvn!g+HWAv+d z&nhrJ{rreDPOUB^4`!d#oDpzTokX37R9YkZ zd3OHqUV2oZHah2~w1Hl`AMg=5vU2U1{cKHyGN)3%N_3p5VBE6m(oKz!3lm(N`bn^$ zixMvOpDK*WH6|P54bpezPeQmKZ6e??wU5{BqAly7MG<^zrIrit!S=fWMHB7C>&rO{ zV{M5J3$I1qg>DI+^rf^XqPZe4rbPcMFq;_q^>%xOd$wNp{F0fzU-2HuD&|Ut=9-Ep z3dkA~YspCEYW6&qGcLq3YhkWMSu54PVoU(h1u+ZA?11%$ODmIyg#6VuR>aL&=MlMl zlyHYld9|&{Stqqw$*7^u;XTW(a^KSMid8DxSIF&{&~|Ab&+y9ooY;z;#a1{kFUskO z^hECHyM^xxr~F*w`F5EKiCH&E#TWtH;;1Jw^i$;Uj`cCi1`pYlK$m#&eC=BD)(31e zDcO{@8nQ?yFte&HpaGk6JT3=k2835C=U+UDO#P)h%I8(+RM7bzk2*1B&6I>TF|YV+ zXU+tnP75ydoKeN?Xz^7}VtV$kSyw+^;bat60XUqrJ%`{mS6M>KsOh zmG?uQ|EDA3=Ox}*edIuB>S}N0B5z;c<@QpG2S;iU8~+}^u(ofGi|g{NAjdbs7ewq^3kK7(PV2VOj9 zBocLM6d3o75N-)06an%zTc24qv)1$Wz9YfxH!Vm#SYXM<{szE}9HicWq`9-8(15uv8CsJfUroLxsf$On`Ck~@ed+7 zxk?x)vcV>9L+n093J}XE@_8s)jiAwC^Z*qS<7Y?4hcCni<9-d|m$%a`y zDC!JVXR5d8OF?VBPU%{W#*H*EHwznjTT`zcIV-!A3p)KB$fK=Jw^p1p)V`O}rflwN zXTI#1#@x8-Qw_JVY`IL-_m*c|HTur9rTO0FPzPYNbuQemr0W&=76XEqBS+b5WFT;P1cwKS%=j;eYuV zQy4W?2cPF1IQnggpbmuV4&xfK?6_It3`qu?1_k5^;x*yg=vaJL&z_00T3P=|cuDgw3zUGE70ZZL3TkK-Jo>O_kN zE-P|hj4l@*)N~Xub4|iDW4Bxkfak$NE9{!M+eFQlQg(jIJv3X}i z*lt63VwVmv8T-vCq1CoI@3C$r{@pX+iC+*3)$AW+h+=hw5Hb(B>V6f*uRdnr=2 
zlZ4=VOS@KAn!$tR6&#Dgzceez5Y)hu%Iqo%?NZije+Q_f0>J6Gt3jAjnrXbqGd?nX zQ9Bd3lwg?_&hV>(C9@CwiDWRbpX4D+S?8>LV0=M-%&4l>k`-=2|IGyjQhyl0` zJe?=Y27dme&IQ;R-83zor~^AJ zpfS{DkD(-k%3+L!5AZ$Qi()u|fZm@d2qgxw5}Gw(1|`;tBE%2|@mLa!k35p73l7tU zx|BxiFuUw0Ay6U}gjMk;Rn|r@jx!C_5g6^<3R@=+D0bq8Z!!9~xU!t6uHTT2kEkl- z8nIXsn9wi^>Uz8h71@>bAB$#=*w!hg91M~(LCN+FNSFXpuxT9FgQMpu?mKN zIuTS_c*)s{Dx8_VKj%Fd5`l+c0n8l485YJtZ~9o|u<4Th|K#Z~ZW|vip8w->SF%AVz{;ap5nsG~73%>3zIc-wj?7X0a5q z9AvlSXzN^Atkys%F*+3ys_t~5woGuEAR9=ojIraY+nIH!~wLxscgcN!^G_OIu{;(mDoUsvd1BHL%Hhj zP830(#lTZAKot8BRnK8{BXP{tkJkRxHT@xl4F+k&CUy?QelG8N5`A8a<}LS%O9UT1#_LuGkT5_E_=k@9@G2vP_wh;LVJ(TBdjmD9ul-YL7bUX#D=TJqR{jbw|)BLt9 zKV)L5c#wLv#?ml(be6`a1rqe`p5I!fg`&Ydx(lVhG9lo7lnQ8Z1ChS zFU~y6i>;6z_Sy8^V7KpEmMC{S3oPzwP=Pm{nD zYzFHZ)F+^PFIIBjB8DbnL^URr zXfril8z}7?O&9uoJ`NYtyLSGuFSE}DHo)sq>6hU{>2+Gy5_tUiIj6%^_T98=!QRU` zPU(mK(&*5g+>ekYp;vkQ`-9KI*}c@P(Ov20M+3DKds>%5Wa)BmyP!?~UcV@$dv(?S zawz?82iE`1-%TFXvdPVb-S=PA{p+5@%-FH^t|$7HqchUHUmxt5B^v%gMJMf0(f&zv z_+|0?^)qIU-M}AmcdS1(^IET9hR>=AGj{lSDhBnx6u{*;=ksJpt)u15@Q!q20mWrg k44aW1khmbX&cqUF Date: Mon, 12 Jun 2023 13:36:05 +0800 Subject: [PATCH 21/52] [Fix] Remove list `self.layers` in hrformer ffn modules (#2449) --- mmpose/models/backbones/hrformer.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/mmpose/models/backbones/hrformer.py b/mmpose/models/backbones/hrformer.py index 4712cfdfb5..0b86617f14 100644 --- a/mmpose/models/backbones/hrformer.py +++ b/mmpose/models/backbones/hrformer.py @@ -299,17 +299,12 @@ def __init__(self, self.act3 = build_activation_layer(act_cfg) self.norm3 = build_norm_layer(norm_cfg, out_features)[1] - # put the modules togather - self.layers = [ - self.fc1, self.norm1, self.act1, self.dw3x3, self.norm2, self.act2, - self.fc2, self.norm3, self.act3 - ] - def forward(self, x, H, W): """Forward function.""" x = nlc_to_nchw(x, (H, W)) - for layer in self.layers: - x = layer(x) + x = self.act1(self.norm1(self.fc1(x))) + x = 
self.act2(self.norm2(self.dw3x3(x))) + x = self.act3(self.norm3(self.fc2(x))) x = nchw_to_nlc(x) return x From 768913a135d6e6ee8f2dec2b7c9d57b9bc5fd86b Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 12 Jun 2023 13:36:31 +0800 Subject: [PATCH 22/52] [Docs] Add skeleton imgs to illustrate(#2450) --- docs/zh_cn/dataset_zoo/2d_face_keypoint.md | 57 ++++++++++++++++++++++ projects/rtmpose/README.md | 16 ++++++ projects/rtmpose/README_CN.md | 18 ++++++- 3 files changed, 90 insertions(+), 1 deletion(-) diff --git a/docs/zh_cn/dataset_zoo/2d_face_keypoint.md b/docs/zh_cn/dataset_zoo/2d_face_keypoint.md index 17eb823954..62f66bd82b 100644 --- a/docs/zh_cn/dataset_zoo/2d_face_keypoint.md +++ b/docs/zh_cn/dataset_zoo/2d_face_keypoint.md @@ -10,6 +10,7 @@ MMPose supported datasets: - [AFLW](#aflw-dataset) \[ [Homepage](https://www.tugraz.at/institute/icg/research/team-bischof/lrs/downloads/aflw/) \] - [COFW](#cofw-dataset) \[ [Homepage](http://www.vision.caltech.edu/xpburgos/ICCV13/) \] - [COCO-WholeBody-Face](#coco-wholebody-face) \[ [Homepage](https://github.com/jin-s13/COCO-WholeBody/) \] +- [LaPa](#lapa-dataset) \[ [Homepage](https://github.com/JDAI-CV/lapa-dataset) \] ## 300W Dataset @@ -325,3 +326,59 @@ mmpose Please also install the latest version of [Extended COCO API](https://github.com/jin-s13/xtcocoapi) to support COCO-WholeBody evaluation: `pip install xtcocotools` + +## LaPa + + + +

    +LaPa (AAAI'2020) + +```bibtex +@inproceedings{liu2020new, + title={A New Dataset and Boundary-Attention Semantic Segmentation for Face Parsing.}, + author={Liu, Yinglu and Shi, Hailin and Shen, Hao and Si, Yue and Wang, Xiaobo and Mei, Tao}, + booktitle={AAAI}, + pages={11637--11644}, + year={2020} +} +``` + +
    + +
    + +
    + +For [LaPa](https://github.com/JDAI-CV/lapa-dataset) dataset, images can be downloaded from [their github page](https://github.com/JDAI-CV/lapa-dataset). + +Download and extract them under $MMPOSE/data, and use our `tools/dataset_converters/lapa2coco.py` to make them look like this: + +```text +mmpose +├── mmpose +├── docs +├── tests +├── tools +├── configs +`── data + │── LaPa + │-- annotations + │ │-- lapa_train.json + │ |-- lapa_val.json + │ |-- lapa_test.json + | |-- lapa_trainval.json + │-- train + │ │-- images + │ │-- labels + │ │-- landmarks + │-- val + │ │-- images + │ │-- labels + │ │-- landmarks + `-- test + │ │-- images + │ │-- labels + │ │-- landmarks + +``` diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index 22c671cc6b..50ce59e16b 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -158,6 +158,9 @@ Feel free to join our community group for more help: #### 17 Keypoints +- Keypoints are defined as [COCO](http://cocodataset.org/). For details please refer to the [meta info](/configs/_base_/datasets/coco.py). + +
    AIC+COCO @@ -199,6 +202,7 @@ Feel free to join our community group for more help: #### 26 Keypoints - Keypoints are defined as [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/). For details please refer to the [meta info](/configs/_base_/datasets/halpe26.py). + - Models are trained and evaluated on `Body8`. | Config | Input Size | PCK@0.1
    (Body8) | AUC
    (Body8) | Params(M) | FLOPS(G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | ncnn-FP16-Latency
    (ms)
    (Snapdragon 865) | Download | @@ -225,6 +229,9 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### WholeBody 2d (133 Keypoints) +- Keypoints are defined as [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/). For details please refer to the [meta info](/configs/_base_/datasets/coco_wholebody.py). + + | Config | Input Size | Whole AP | Whole AR | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | | [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | @@ -233,12 +240,18 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Animal 2d (17 Keypoints) +- Keypoints are defined as [AP-10K](https://github.com/AlexTheBad/AP-10K/). For details please refer to the [meta info](/configs/_base_/datasets/ap10k.py). + + | Config | Input Size | AP
    (AP10K) | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | | [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | ### Face 2d (106 Keypoints) +- Keypoints are defined as [LaPa](https://github.com/JDAI-CV/lapa-dataset). For details please refer to the [meta info](/configs/_base_/datasets/lapa.py). + +
    Face6 @@ -260,6 +273,9 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Hand 2d (21 Keypoints) +- Keypoints are defined as [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/). For details please refer to the [meta info](/configs/_base_/datasets/coco_wholebody_hand.py). + + | Detection Config | Input Size | Model AP
    (OneHand10K) | Flops
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | | [RTMDet-nano
    (alpha version)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) | diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 57e00a95f2..1d33bd179e 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -149,6 +149,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 #### 17 Keypoints +- 关键点骨架定义遵循 [COCO](http://cocodataset.org/). 详情见 [meta info](/configs/_base_/datasets/coco.py). + +
    AIC+COCO @@ -189,7 +192,8 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 #### 26 Keypoints -- 26 关键点的定义遵循 [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/),详情见 [meta info](/configs/_base_/datasets/halpe26.py)。 +- 关键点骨架定义遵循 [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/),详情见 [meta info](/configs/_base_/datasets/halpe26.py)。 + - 模型在 `Body8` 上进行训练和评估。 | Config | Input Size | PCK@0.1
    (Body8) | AUC
    (Body8) | Params(M) | FLOPS(G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | ncnn-FP16-Latency
    (ms)
    (Snapdragon 865) | Download | @@ -216,6 +220,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 人体全身 2d 关键点 (133 Keypoints) +- 关键点骨架定义遵循 [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/),详情见 [meta info](/configs/_base_/datasets/coco_wholebody.py)。 + + | Config | Input Size | Whole AP | Whole AR | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | | [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | @@ -224,12 +231,18 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 动物 2d 关键点 (17 Keypoints) +- 关键点骨架定义遵循 [AP-10K](https://github.com/AlexTheBad/AP-10K/),详情见 [meta info](/configs/_base_/datasets/ap10k.py)。 + + | Config | Input Size | AP
    (AP10K) | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | | [RTMPose-m](./rtmpose/animal_2d_keypoint/rtmpose-m_8xb64-210e_ap10k-256x256.py) | 256x256 | 72.2 | 2.57 | 14.157 | 2.404 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth) | ### 脸部 2d 关键点 (106 Keypoints) +- 关键点骨架定义遵循 [LaPa](https://github.com/JDAI-CV/lapa-dataset),详情见 [meta info](/configs/_base_/datasets/lapa.py)。 + +
    Face6 @@ -251,6 +264,9 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 手部 2d 关键点 (21 Keypoints) +- 关键点骨架定义遵循 [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/),详情见 [meta info](/configs/_base_/datasets/coco_wholebody_hand.py)。 + + | Detection Config | Input Size | Model AP
    (OneHand10K) | Flops
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | | [RTMDet-nano (试用)](./rtmdet/hand/rtmdet_nano_320-8xb32_hand.py) | 320x320 | 76.0 | 0.31 | - | - | [Det Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmdet_nano_8xb32-300e_hand-267f9c8f.pth) | From 75e8047e726f9187fa14c3c4441fca961f28c411 Mon Sep 17 00:00:00 2001 From: Lz <2120140200@mail.nankai.edu.cn> Date: Mon, 12 Jun 2023 13:59:56 +0800 Subject: [PATCH 23/52] [Project] Support SKPS model (#2366) --- projects/skps/README.md | 83 ++++ ...rnetv2-w18_skps-1xb16-160e_cofw-256x256.py | 176 ++++++++ ...hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py | 176 ++++++++ projects/skps/custom_codecs/__init__.py | 3 + projects/skps/custom_codecs/skps_heatmap.py | 164 +++++++ projects/skps/models/__init__.py | 3 + projects/skps/models/skps_head.py | 399 ++++++++++++++++++ 7 files changed, 1004 insertions(+) create mode 100644 projects/skps/README.md create mode 100644 projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb16-160e_cofw-256x256.py create mode 100644 projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py create mode 100644 projects/skps/custom_codecs/__init__.py create mode 100644 projects/skps/custom_codecs/skps_heatmap.py create mode 100644 projects/skps/models/__init__.py create mode 100644 projects/skps/models/skps_head.py diff --git a/projects/skps/README.md b/projects/skps/README.md new file mode 100644 index 0000000000..13e8c4a7ab --- /dev/null +++ b/projects/skps/README.md @@ -0,0 +1,83 @@ +# Simple Keypoints + +## Description + +Author: @2120140200@mail.nankai.edu.cn + +It is a simple keypoints detector model. The model predict a score heatmap and an encoded location map. +The result in wflw achieves 3.94 NME. 
+ +## Usage + +### Prerequisites + +- Python 3.7 +- PyTorch 1.6 or higher +- [MIM](https://github.com/open-mmlab/mim) v0.33 or higher +- [MMPose](https://github.com/open-mmlab/mmpose) v1.0.0rc0 or higher + +All the commands below rely on the correct configuration of `PYTHONPATH`, which should point to the project's directory so that Python can locate the module files. In `example_project/` root directory, run the following line to add the current directory to `PYTHONPATH`: + +```shell +export PYTHONPATH=`pwd`:$PYTHONPATH +``` + +### Data Preparation + +Prepare the COCO dataset according to the [instruction](https://mmpose.readthedocs.io/en/dev-1.x/dataset_zoo/2d_body_keypoint.html#coco). + +### Training commands + +**To train with single GPU:** + +```shell +mim train mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py +``` + +**To train with multiple GPUs:** + +```shell +mim train mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py --launcher pytorch --gpus 8 +``` + +**To train with multiple GPUs by slurm:** + +```shell +mim train mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py --launcher slurm \ + --gpus 16 --gpus-per-node 8 --partition $PARTITION +``` + +### Testing commands + +**To test with single GPU:** + +```shell +mim test mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py -C $CHECKPOINT +``` + +**To test with multiple GPUs:** + +```shell +mim test mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py -C $CHECKPOINT --launcher pytorch --gpus 8 +``` + +**To test with multiple GPUs by slurm:** + +```shell +mim test mmpose configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py -C $CHECKPOINT --launcher slurm \ + --gpus 16 --gpus-per-node 8 --partition $PARTITION +``` + +## Results + +WFLW + +| Arch | Input Size | NME*test* | NME*pose* | NME*illumination* | NME*occlusion* | NME*blur* | NME*makeup* | NME*expression* | ckpt | log | +| :--------- | :--------: | :------------------: | 
:------------------: | :--------------------------: | :-----------------------: | :------------------: | :--------------------: | :------------------------: | :--------: | :-------: | +| [skps](./configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py) | 256x256 | 3.88 | 6.60 | 3.81 | 4.57 | 4.44 | 3.75 | 4.13 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/skps/best_NME_epoch_80.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/skps/20230522_142437.log) | + +COFW + +| Arch | Input Size | NME | ckpt | log | +| :------------------------------------------------------------- | :--------: | :--: | :------------------------------------------------------------: | :------------------------------------------------------------: | +| [skps](./configs/td-hm_hrnetv2-w18_skps-1xb16-160e_cofw-256x256.py) | 256x256 | 3.20 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/skps/best_NME_epoch_113.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/skps/20230524_074949.log) | diff --git a/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb16-160e_cofw-256x256.py b/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb16-160e_cofw-256x256.py new file mode 100644 index 0000000000..494c4325df --- /dev/null +++ b/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb16-160e_cofw-256x256.py @@ -0,0 +1,176 @@ +custom_imports = dict(imports=['custom_codecs', 'models']) + +_base_ = ['mmpose::_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=160, val_interval=1) + +# optimizer +optim_wrapper = dict( + optimizer=dict(type='AdamW', lr=2e-3, weight_decay=0.0005)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=160, + milestones=[80, 120], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = 
dict(checkpoint=dict(save_best='NME', rule='less', interval=1)) + +# codec settings +codec = dict( + type='SKPSHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(18, 36)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(18, 36, 72)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(18, 36, 72, 144), + multiscale_output=True), + upsample=dict(mode='bilinear', align_corners=False)), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18'), + ), + neck=dict( + type='FeatureMapProcessor', + concat=True, + ), + head=dict( + type='SKPSHead', + in_channels=270, + out_channels=29, + conv_out_channels=(270, ), + conv_kernel_sizes=(1, ), + heatmap_loss=dict(type='AdaptiveWingLoss', use_target_weight=True), + offside_loss=dict(type='AdaptiveWingLoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'COFWDataset' +data_mode = 'topdown' +data_root = 'data/cofw/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale', padding=1), + dict(type='RandomFlip', direction='horizontal'), + dict( + type='Albumentation', + transforms=[ + dict(type='RandomBrightnessContrast', p=0.5), + dict(type='HueSaturationValue', p=0.5), + dict(type='GaussianBlur', p=0.5), + dict(type='GaussNoise', 
p=0.1), + dict( + type='CoarseDropout', + max_holes=8, + max_height=0.2, + max_width=0.2, + min_holes=1, + min_height=0.1, + min_width=0.1, + p=0.5), + ]), + dict( + type='RandomBBoxTransform', + shift_prob=0., + rotate_factor=45, + scale_factor=(0.75, 1.25), + scale_prob=0), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale', padding=1), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=16, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/cofw_train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/cofw_test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git a/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py b/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py new file mode 100644 index 0000000000..0547ebcff2 --- /dev/null +++ b/projects/skps/configs/td-hm_hrnetv2-w18_skps-1xb64-80e_wflw-256x256.py @@ -0,0 +1,176 @@ +custom_imports = dict(imports=['custom_codecs', 'models']) + +_base_ = ['mmpose::_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=80, val_interval=1) + +# optimizer 
+optim_wrapper = dict( + optimizer=dict(type='AdamW', lr=2e-3, weight_decay=0.0005)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=80, + milestones=[40, 60], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='NME', rule='less', interval=1)) + +# codec settings +codec = dict( + type='SKPSHeatmap', input_size=(256, 256), heatmap_size=(64, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(18, 36)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(18, 36, 72)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(18, 36, 72, 144), + multiscale_output=True), + upsample=dict(mode='bilinear', align_corners=False)), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18'), + ), + neck=dict( + type='FeatureMapProcessor', + concat=True, + ), + head=dict( + type='SKPSHead', + in_channels=270, + out_channels=98, + conv_out_channels=(270, ), + conv_kernel_sizes=(1, ), + heatmap_loss=dict(type='AdaptiveWingLoss', use_target_weight=True), + offside_loss=dict(type='AdaptiveWingLoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) 
+ +# base dataset settings +dataset_type = 'WFLWDataset' +data_mode = 'topdown' +data_root = './data/wflw/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict( + type='Albumentation', + transforms=[ + dict(type='RandomBrightnessContrast', p=0.5), + dict(type='HueSaturationValue', p=0.5), + dict(type='GaussianBlur', p=0.5), + dict(type='GaussNoise', p=0.1), + dict( + type='CoarseDropout', + max_holes=8, + max_height=0.2, + max_width=0.2, + min_holes=1, + min_height=0.1, + min_width=0.1, + p=0.5), + ]), + dict( + type='RandomBBoxTransform', + shift_prob=0.0, + rotate_factor=45, + scale_factor=(0.75, 1.25), + scale_prob=1.), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/face_landmarks_wflw_train.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/face_landmarks_wflw_test.json', + data_prefix=dict(img='images/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='NME', + norm_mode='keypoint_distance', +) +test_evaluator = val_evaluator diff --git 
a/projects/skps/custom_codecs/__init__.py b/projects/skps/custom_codecs/__init__.py new file mode 100644 index 0000000000..b346b55de6 --- /dev/null +++ b/projects/skps/custom_codecs/__init__.py @@ -0,0 +1,3 @@ +from .skps_heatmap import SKPSHeatmap + +__all__ = ['SKPSHeatmap'] diff --git a/projects/skps/custom_codecs/skps_heatmap.py b/projects/skps/custom_codecs/skps_heatmap.py new file mode 100644 index 0000000000..f542ff2970 --- /dev/null +++ b/projects/skps/custom_codecs/skps_heatmap.py @@ -0,0 +1,164 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Tuple + +import numpy as np + +from mmpose.codecs.base import BaseKeypointCodec +from mmpose.codecs.utils.gaussian_heatmap import \ + generate_unbiased_gaussian_heatmaps +from mmpose.codecs.utils.post_processing import get_heatmap_maximum +from mmpose.registry import KEYPOINT_CODECS + + +@KEYPOINT_CODECS.register_module() +class SKPSHeatmap(BaseKeypointCodec): + """Generate heatmap the same with MSRAHeatmap, and produce offset map + within x and y directions. 
+ + Note: + + - instance number: N + - keypoint number: K + - keypoint dimension: D + - image size: [w, h] + - heatmap size: [W, H] + - offset_map size: [W, H] + + Encoded: + + - heatmaps (np.ndarray): The generated heatmap in shape (K, H, W) + where [W, H] is the `heatmap_size` + - offset_maps (np.ndarray): The generated offset map in x and y + direction in shape (2K, H, W) where [W, H] is the + `offset_map_size` + - keypoint_weights (np.ndarray): The target weights in shape (N, K) + + Args: + input_size (tuple): Image size in [w, h] + heatmap_size (tuple): Heatmap size in [W, H] + sigma (float): The sigma value of the Gaussian heatmap + """ + + def __init__(self, input_size: Tuple[int, int], + heatmap_size: Tuple[int, int], sigma: float) -> None: + super().__init__() + self.input_size = input_size + self.heatmap_size = heatmap_size + self.sigma = sigma + self.scale_factor = (np.array(input_size) / + heatmap_size).astype(np.float32) + + self.y_range, self.x_range = np.meshgrid( + np.arange(0, self.heatmap_size[1]), + np.arange(0, self.heatmap_size[0]), + indexing='ij') + + def encode(self, + keypoints: np.ndarray, + keypoints_visible: Optional[np.ndarray] = None) -> dict: + """Encode keypoints into heatmaps. Note that the original keypoint + coordinates should be in the input image space. 
+ + Args: + keypoints (np.ndarray): Keypoint coordinates in shape (N, K, D) + keypoints_visible (np.ndarray): Keypoint visibilities in shape + (N, K) + + Returns: + dict: + - heatmaps (np.ndarray): The generated heatmap in shape + (K, H, W) where [W, H] is the `heatmap_size` + - offset_maps (np.ndarray): The generated offset maps in x and y + directions in shape (2*K, H, W) where [W, H] is the + `offset_map_size` + - keypoint_weights (np.ndarray): The target weights in shape + (N, K) + """ + + assert keypoints.shape[0] == 1, ( + f'{self.__class__.__name__} only support single-instance ' + 'keypoint encoding') + + if keypoints_visible is None: + keypoints_visible = np.ones(keypoints.shape[:2], dtype=np.float32) + + heatmaps, keypoint_weights = generate_unbiased_gaussian_heatmaps( + heatmap_size=self.heatmap_size, + keypoints=keypoints / self.scale_factor, + keypoints_visible=keypoints_visible, + sigma=self.sigma) + + offset_maps = self.generate_offset_map( + heatmap_size=self.heatmap_size, + keypoints=keypoints / self.scale_factor, + ) + + encoded = dict( + heatmaps=heatmaps, + keypoint_weights=keypoint_weights[0], + displacements=offset_maps) + + return encoded + + def generate_offset_map(self, heatmap_size: Tuple[int, int], + keypoints: np.ndarray): + + N, K, _ = keypoints.shape + + # batchsize 1 + keypoints = keypoints[0] + + # caution: there will be a broadcast which produce + # offside_x and offside_y with shape 64x64x98 + + offset_x = keypoints[:, 0] - np.expand_dims(self.x_range, axis=-1) + offset_y = keypoints[:, 1] - np.expand_dims(self.y_range, axis=-1) + + offset_map = np.concatenate([offset_x, offset_y], axis=-1) + + offset_map = np.transpose(offset_map, axes=[2, 0, 1]) + + return offset_map + + def decode(self, encoded: np.ndarray, + offset_maps: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: + """Decode keypoint coordinates from heatmaps. The decoded keypoint + coordinates are in the input image space. 
+ + Args: + encoded (np.ndarray): Heatmaps in shape (K, H, W) + + Returns: + tuple: + - keypoints (np.ndarray): Decoded keypoint coordinates in shape + (N, K, D) + - scores (np.ndarray): The keypoint scores in shape (N, K). It + usually represents the confidence of the keypoint prediction + """ + heatmaps = encoded.copy() + + offset_maps = offset_maps.copy() + + K, H, W = heatmaps.shape + + keypoints, scores = get_heatmap_maximum(heatmaps) + + offset_x = offset_maps[:K, ...] + offset_y = offset_maps[K:, ...] + + keypoints_interger = keypoints.astype(np.int32) + keypoints_decimal = np.zeros_like(keypoints) + + for i in range(K): + [x, y] = keypoints_interger[i] + if x < 0 or y < 0: + x = y = 0 + + # caution: torch tensor shape is nchw, so index should be i,y,x + keypoints_decimal[i][0] = x + offset_x[i, y, x] + keypoints_decimal[i][1] = y + offset_y[i, y, x] + + # Restore the keypoint scale + keypoints_decimal = keypoints_decimal * self.scale_factor + + return keypoints_decimal[None], scores[None] diff --git a/projects/skps/models/__init__.py b/projects/skps/models/__init__.py new file mode 100644 index 0000000000..55377c089c --- /dev/null +++ b/projects/skps/models/__init__.py @@ -0,0 +1,3 @@ +from .skps_head import SKPSHead + +__all__ = ['SKPSHead'] diff --git a/projects/skps/models/skps_head.py b/projects/skps/models/skps_head.py new file mode 100644 index 0000000000..73f84dc443 --- /dev/null +++ b/projects/skps/models/skps_head.py @@ -0,0 +1,399 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+ +from typing import Optional, Sequence, Tuple, Union + +import torch +import torch.nn as nn +from mmcv.cnn import build_conv_layer +from mmengine.model import ModuleDict +from mmengine.structures import InstanceData +from torch import Tensor + +from mmpose.evaluation.functional import pose_pck_accuracy +from mmpose.models.heads.base_head import BaseHead +from mmpose.models.utils.tta import flip_coordinates +from mmpose.registry import KEYPOINT_CODECS, MODELS +from mmpose.utils.tensor_utils import to_numpy +from mmpose.utils.typing import (ConfigType, Features, InstanceList, + OptConfigType, OptSampleList, Predictions) + +OptIntSeq = Optional[Sequence[int]] + + +@MODELS.register_module() +class SKPSHead(BaseHead): + """DisEntangled Keypoint Regression head introduced in `Bottom-up human + pose estimation via disentangled keypoint regression`_ by Geng et al + (2021). The head is composed of a heatmap branch and a displacement branch. + + Args: + in_channels (int | Sequence[int]): Number of channels in the input + feature map + out_channels (int): Number of channels in the output heatmap + conv_out_channels (Sequence[int], optional): The output channel number + of each intermediate conv layer. ``None`` means no intermediate + conv layer between deconv layers and the final conv layer. + Defaults to ``None`` + conv_kernel_sizes (Sequence[int | tuple], optional): The kernel size + of each intermediate conv layer. Defaults to ``None`` + final_layer (dict): Arguments of the final Conv2d layer. + Defaults to ``dict(kernel_size=1)`` + loss (Config): Config of the keypoint loss. Defaults to use + :class:`KeypointMSELoss` + decoder (Config, optional): The decoder config that controls decoding + keypoint coordinates from the network output. Defaults to ``None`` + init_cfg (Config, optional): Config to control the initialization. See + :attr:`default_init_cfg` for default settings + + + .. 
_`Bottom-up human pose estimation via disentangled keypoint regression`: + https://arxiv.org/abs/2104.02300 + """ + + _version = 2 + + def __init__(self, + in_channels: Union[int, Sequence[int]], + out_channels: int, + conv_out_channels: OptIntSeq = None, + conv_kernel_sizes: OptIntSeq = None, + final_layer: dict = dict(kernel_size=1), + heatmap_loss: ConfigType = dict( + type='AdaptiveWingLoss', use_target_weight=True), + offside_loss: ConfigType = dict( + type='AdaptiveWingLoss', use_target_weight=True), + decoder: OptConfigType = None, + init_cfg: OptConfigType = None): + + if init_cfg is None: + init_cfg = self.default_init_cfg + + super().__init__(init_cfg) + + self.in_channels = in_channels + self.out_channels = out_channels + + if conv_out_channels: + if conv_kernel_sizes is None or len(conv_out_channels) != len( + conv_kernel_sizes): + raise ValueError( + '"conv_out_channels" and "conv_kernel_sizes" should ' + 'be integer sequences with the same length. Got ' + f'mismatched lengths {conv_out_channels} and ' + f'{conv_kernel_sizes}') + + self.conv_layers = self._make_conv_layers( + in_channels=in_channels, + layer_out_channels=conv_out_channels, + layer_kernel_sizes=conv_kernel_sizes) + in_channels = conv_out_channels[-1] + else: + self.conv_layers = nn.Identity() + + if final_layer is not None: + cfg = dict( + type='Conv2d', + in_channels=in_channels, + out_channels=self.out_channels * 3, + kernel_size=1, + bias=True) + cfg.update(final_layer) + self.final_layer = build_conv_layer(cfg) + else: + self.final_layer = nn.Identity() + + # build losses + self.loss_module = ModuleDict( + dict( + heatmap=MODELS.build(heatmap_loss), + offside=MODELS.build(offside_loss), + )) + + # build decoder + if decoder is not None: + self.decoder = KEYPOINT_CODECS.build(decoder) + else: + self.decoder = None + + # Register the hook to automatically convert old version state dicts + self._register_load_state_dict_pre_hook(self._load_state_dict_pre_hook) + + @property + def 
default_init_cfg(self): + init_cfg = [ + dict(type='Normal', layer=['Conv2d', 'ConvTranspose2d'], std=0.01), + dict(type='Constant', layer='BatchNorm2d', val=1) + ] + return init_cfg + + def _make_conv_layers(self, in_channels: int, + layer_out_channels: Sequence[int], + layer_kernel_sizes: Sequence[int]) -> nn.Module: + """Create convolutional layers by given parameters.""" + + layers = [] + for out_channels, kernel_size in zip(layer_out_channels, + layer_kernel_sizes): + padding = (kernel_size - 1) // 2 + cfg = dict( + type='Conv2d', + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=1, + padding=padding) + layers.append(build_conv_layer(cfg)) + layers.append(nn.BatchNorm2d(num_features=out_channels)) + layers.append(nn.ReLU(inplace=True)) + in_channels = out_channels + + return nn.Sequential(*layers) + + def forward(self, feats: Tuple[Tensor]) -> Tensor: + """Forward the network. The input is multi scale feature maps and the + output is a tuple of heatmap and displacement. + + Args: + feats (Tuple[Tensor]): Multi scale feature maps. + + Returns: + Tuple[Tensor]: output heatmap and displacement. + """ + x = feats[-1] + + x = self.conv_layers(x) + x = self.final_layer(x) + heatmaps = x[:, :self.out_channels, ...] + offside = x[:, self.out_channels:, ...] + return heatmaps, offside + + def loss(self, + feats: Tuple[Tensor], + batch_data_samples: OptSampleList, + train_cfg: ConfigType = {}) -> dict: + """Calculate losses from a batch of inputs and data samples. + + Args: + feats (Tuple[Tensor]): The multi-stage features + batch_data_samples (List[:obj:`PoseDataSample`]): The batch + data samples + train_cfg (dict): The runtime config for training process. + Defaults to {} + + Returns: + dict: A dictionary of losses. 
+ """ + pred_heatmaps, pred_offside = self.forward(feats) + gt_heatmaps = torch.stack( + [d.gt_fields.heatmaps for d in batch_data_samples]) + keypoint_weights = torch.stack([ + d.gt_instance_labels.keypoint_weights for d in batch_data_samples + ]) + gt_offside = torch.stack( + [d.gt_fields.displacements for d in batch_data_samples]) + + # calculate losses + losses = dict() + heatmap_loss = self.loss_module['heatmap'](pred_heatmaps, gt_heatmaps, + keypoint_weights) + + n, c, h, w = pred_offside.size() + offside_loss_x = self.loss_module['offside'](pred_offside[:, :c // 2], + gt_offside[:, :c // 2], + gt_heatmaps) + + offside_loss_y = self.loss_module['offside'](pred_offside[:, c // 2:], + gt_offside[:, c // 2:], + gt_heatmaps) + + offside_loss = (offside_loss_x + offside_loss_y) / 2. + + losses.update({ + 'loss/heatmap': heatmap_loss, + 'loss/offside': offside_loss, + }) + # calculate accuracy + if train_cfg.get('compute_acc', True): + _, avg_acc, _ = pose_pck_accuracy( + output=to_numpy(pred_heatmaps), + target=to_numpy(gt_heatmaps), + mask=to_numpy(keypoint_weights) > 0) + + acc_pose = torch.tensor(avg_acc, device=gt_heatmaps.device) + losses.update(acc_pose=acc_pose) + + return losses + + def predict(self, + feats: Features, + batch_data_samples: OptSampleList, + test_cfg: ConfigType = {}) -> Predictions: + """Predict results from features. + + Args: + feats (Tuple[Tensor] | List[Tuple[Tensor]]): The multi-stage + features (or multiple multi-scale features in TTA) + batch_data_samples (List[:obj:`PoseDataSample`]): The batch + data samples + test_cfg (dict): The runtime config for testing process. Defaults + to {} + + Returns: + Union[InstanceList | Tuple[InstanceList | PixelDataList]]: If + ``test_cfg['output_heatmap']==True``, return both pose and heatmap + prediction; otherwise only return the pose prediction. 
+ + The pose prediction is a list of ``InstanceData``, each contains + the following fields: + + - keypoints (np.ndarray): predicted keypoint coordinates in + shape (num_instances, K, D) where K is the keypoint number + and D is the keypoint dimension + - keypoint_scores (np.ndarray): predicted keypoint scores in + shape (num_instances, K) + """ + + flip_test = test_cfg.get('flip_test', False) + metainfo = batch_data_samples[0].metainfo + + if flip_test: + assert isinstance(feats, list) and len(feats) == 2 + flip_indices = metainfo['flip_indices'] + _feat, _feat_flip = feats + _heatmaps, _displacements = self.forward(_feat) + _heatmaps_flip, _displacements_flip = self.forward(_feat_flip) + + batch_size = _heatmaps.shape[0] + + _heatmaps = to_numpy(_heatmaps) + _displacements = to_numpy(_displacements) + + _heatmaps_flip = to_numpy(_heatmaps_flip) + _displacements_flip = to_numpy(_displacements_flip) + preds = [] + for b in range(batch_size): + _keypoints, _keypoint_scores = self.decoder.decode( + _heatmaps[b], _displacements[b]) + + _keypoints_flip, _keypoint_scores_flip = self.decoder.decode( + _heatmaps_flip[b], _displacements_flip[b]) + + # flip the kps coords + real_w = self.decoder.input_size[0] + real_h = self.decoder.input_size[1] + + # the coordinate range is 0-255 for 256x256 input size + _keypoints_flip /= (real_w - 1) + _keypoints_flip = flip_coordinates( + _keypoints_flip, + flip_indices=flip_indices, + shift_coords=False, + input_size=((real_w - 1), (real_h - 1))) + _keypoints_flip *= (real_w - 1) + + _keypoints = (_keypoints + _keypoints_flip) / 2. 
+ # pack outputs + preds.append(InstanceData(keypoints=_keypoints)) + return preds + + else: + batch_heatmaps, batch_displacements = self.forward(feats) + + preds = self.decode(batch_heatmaps, batch_displacements, test_cfg, + metainfo) + + return preds + + def decode(self, + heatmaps: Tuple[Tensor], + offside: Tuple[Tensor], + test_cfg: ConfigType = {}, + metainfo: dict = {}) -> InstanceList: + """Decode keypoints from outputs. + + Args: + heatmaps (Tuple[Tensor]): The output heatmaps inferred from one + image or multi-scale images. + offside (Tuple[Tensor]): The output displacement fields + inferred from one image or multi-scale images. + test_cfg (dict): The runtime config for testing process. Defaults + to {} + metainfo (dict): The metainfo of test dataset. Defaults to {} + + Returns: + List[InstanceData]: A list of InstanceData, each contains the + decoded pose information of the instances of one data sample. + """ + + if self.decoder is None: + raise RuntimeError( + f'The decoder has not been set in {self.__class__.__name__}. ' + 'Please set the decoder configs in the init parameters to ' + 'enable head methods `head.predict()` and `head.decode()`') + + preds = [] + batch_size = heatmaps.shape[0] + + heatmaps = to_numpy(heatmaps) + offside = to_numpy(offside) + + for b in range(batch_size): + keypoints, keypoint_scores = self.decoder.decode( + heatmaps[b], offside[b]) + + # pack outputs + preds.append( + InstanceData( + keypoints=keypoints, keypoint_scores=keypoint_scores)) + + return preds + + def _load_state_dict_pre_hook(self, state_dict, prefix, local_meta, *args, + **kwargs): + """A hook function to convert old-version state dict of + :class:`DeepposeRegressionHead` (before MMPose v1.0.0) to a + compatible format of :class:`RegressionHead`. + + The hook will be automatically registered during initialization. 
+ """ + version = local_meta.get('version', None) + if version and version >= self._version: + return + + # convert old-version state dict + keys = list(state_dict.keys()) + for _k in keys: + if not _k.startswith(prefix): + continue + v = state_dict.pop(_k) + k = _k[len(prefix):] + # In old version, "final_layer" includes both intermediate + # conv layers (new "conv_layers") and final conv layers (new + # "final_layer"). + # + # If there is no intermediate conv layer, old "final_layer" will + # have keys like "final_layer.xxx", which should be still + # named "final_layer.xxx"; + # + # If there are intermediate conv layers, old "final_layer" will + # have keys like "final_layer.n.xxx", where the weights of the last + # one should be renamed "final_layer.xxx", and others should be + # renamed "conv_layers.n.xxx" + k_parts = k.split('.') + if k_parts[0] == 'final_layer': + if len(k_parts) == 3: + assert isinstance(self.conv_layers, nn.Sequential) + idx = int(k_parts[1]) + if idx < len(self.conv_layers): + # final_layer.n.xxx -> conv_layers.n.xxx + k_new = 'conv_layers.' + '.'.join(k_parts[1:]) + else: + # final_layer.n.xxx -> final_layer.xxx + k_new = 'final_layer.' + k_parts[2] + else: + # final_layer.xxx remains final_layer.xxx + k_new = k + else: + k_new = k + + state_dict[prefix + k_new] = v From 2b80fcee569f9be5142f67bad0cb1b644a1b5484 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 12 Jun 2023 14:41:19 +0800 Subject: [PATCH 24/52] [Fix] Fix md format (#2451) --- projects/rtmpose/README.md | 12 ++++++------ projects/rtmpose/README_CN.md | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index 50ce59e16b..744680ed84 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -159,7 +159,7 @@ Feel free to join our community group for more help: #### 17 Keypoints - Keypoints are defined as [COCO](http://cocodataset.org/). 
For details please refer to the [meta info](/configs/_base_/datasets/coco.py). - +-
    AIC+COCO @@ -202,7 +202,7 @@ Feel free to join our community group for more help: #### 26 Keypoints - Keypoints are defined as [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/). For details please refer to the [meta info](/configs/_base_/datasets/halpe26.py). - +- - Models are trained and evaluated on `Body8`. | Config | Input Size | PCK@0.1
    (Body8) | AUC
    (Body8) | Params(M) | FLOPS(G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | ncnn-FP16-Latency
    (ms)
    (Snapdragon 865) | Download | @@ -230,7 +230,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### WholeBody 2d (133 Keypoints) - Keypoints are defined as [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/). For details please refer to the [meta info](/configs/_base_/datasets/coco_wholebody.py). - +- | Config | Input Size | Whole AP | Whole AR | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | @@ -241,7 +241,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Animal 2d (17 Keypoints) - Keypoints are defined as [AP-10K](https://github.com/AlexTheBad/AP-10K/). For details please refer to the [meta info](/configs/_base_/datasets/ap10k.py). - +- | Config | Input Size | AP
    (AP10K) | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | @@ -250,7 +250,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Face 2d (106 Keypoints) - Keypoints are defined as [LaPa](https://github.com/JDAI-CV/lapa-dataset). For details please refer to the [meta info](/configs/_base_/datasets/lapa.py). - +-
    Face6 @@ -274,7 +274,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr ### Hand 2d (21 Keypoints) - Keypoints are defined as [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/). For details please refer to the [meta info](/configs/_base_/datasets/coco_wholebody_hand.py). - +- | Detection Config | Input Size | Model AP
    (OneHand10K) | Flops
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 1d33bd179e..6e33d1a2ee 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -150,7 +150,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 #### 17 Keypoints - 关键点骨架定义遵循 [COCO](http://cocodataset.org/). 详情见 [meta info](/configs/_base_/datasets/coco.py). - +-
    AIC+COCO @@ -193,7 +193,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 #### 26 Keypoints - 关键点骨架定义遵循 [Halpe26](https://github.com/Fang-Haoshu/Halpe-FullBody/),详情见 [meta info](/configs/_base_/datasets/halpe26.py)。 - +- - 模型在 `Body8` 上进行训练和评估。 | Config | Input Size | PCK@0.1
    (Body8) | AUC
    (Body8) | Params(M) | FLOPS(G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | ncnn-FP16-Latency
    (ms)
    (Snapdragon 865) | Download | @@ -221,7 +221,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 人体全身 2d 关键点 (133 Keypoints) - 关键点骨架定义遵循 [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/),详情见 [meta info](/configs/_base_/datasets/coco_wholebody.py)。 - +- | Config | Input Size | Whole AP | Whole AR | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :------------------------------ | :--------: | :------: | :------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :-------------------------------: | @@ -232,7 +232,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 动物 2d 关键点 (17 Keypoints) - 关键点骨架定义遵循 [AP-10K](https://github.com/AlexTheBad/AP-10K/),详情见 [meta info](/configs/_base_/datasets/ap10k.py)。 - +- | Config | Input Size | AP
    (AP10K) | FLOPS
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :----------------------------: | :--------: | :----------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :------------------------------: | @@ -241,7 +241,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 脸部 2d 关键点 (106 Keypoints) - 关键点骨架定义遵循 [LaPa](https://github.com/JDAI-CV/lapa-dataset),详情见 [meta info](/configs/_base_/datasets/lapa.py)。 - +-
    Face6 @@ -265,7 +265,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 ### 手部 2d 关键点 (21 Keypoints) - 关键点骨架定义遵循 [COCO-WholeBody](https://github.com/jin-s13/COCO-WholeBody/),详情见 [meta info](/configs/_base_/datasets/coco_wholebody_hand.py)。 - +- | Detection Config | Input Size | Model AP
    (OneHand10K) | Flops
    (G) | ORT-Latency
    (ms)
    (i7-11700) | TRT-FP16-Latency
    (ms)
    (GTX 1660Ti) | Download | | :---------------------------: | :--------: | :---------------------------: | :---------------: | :-----------------------------------------: | :------------------------------------------------: | :--------------------: | From bf3d9ee9db1612996ce5c463d72b69269bfbcc7d Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Wed, 14 Jun 2023 00:29:10 +0800 Subject: [PATCH 25/52] [Enahnce] Support openpose style visualization with inferencer (#2456) --- demo/inferencer_demo.py | 7 +++- .../inferencers/base_mmpose_inferencer.py | 17 ++++----- mmpose/apis/inferencers/mmpose_inferencer.py | 23 ++++-------- mmpose/apis/inferencers/pose2d_inferencer.py | 36 +++++++++++++++---- mmpose/apis/inferencers/pose3d_inferencer.py | 2 ++ 5 files changed, 53 insertions(+), 32 deletions(-) diff --git a/demo/inferencer_demo.py b/demo/inferencer_demo.py index d7bbbb5b52..348eea05d5 100644 --- a/demo/inferencer_demo.py +++ b/demo/inferencer_demo.py @@ -120,6 +120,12 @@ def parse_args(): type=int, default=1, help='Link thickness for visualization.') + parser.add_argument( + '--skeleton-style', + default='mmpose', + type=str, + choices=['mmpose', 'openpose'], + help='Skeleton style selection') parser.add_argument( '--vis-out-dir', type=str, @@ -142,7 +148,6 @@ def parse_args(): 'det_weights', 'det_cat_ids', 'pose3d', 'pose3d_weights' ] init_args = {} - init_args['output_heatmaps'] = call_args.pop('draw_heatmap') for init_kw in init_kws: init_args[init_kw] = call_args.pop(init_kw) diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index f914793086..bb1590dc27 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -216,9 +216,6 @@ def _webcam_reader() -> Generator: return _webcam_reader() - def _visualization_window_on_close(self, event): - self._window_closing = True - def _init_pipeline(self, cfg: ConfigType) -> Callable: """Initialize the 
test pipeline. @@ -233,6 +230,12 @@ def _init_pipeline(self, cfg: ConfigType) -> Callable: init_default_scope(cfg.get('default_scope', 'mmpose')) return Compose(cfg.test_dataloader.dataset.pipeline) + def update_model_visualizer_settings(self, **kwargs): + """Update the settings of models and visualizer according to inference + arguments.""" + + pass + def preprocess(self, inputs: InputsType, batch_size: int = 1, @@ -268,8 +271,7 @@ def visualize(self, kpt_thr: float = 0.3, vis_out_dir: str = '', window_name: str = '', - window_close_event_handler: Optional[Callable] = None - ) -> List[np.ndarray]: + **kwargs) -> List[np.ndarray]: """Visualize predictions. Args: @@ -289,7 +291,6 @@ def visualize(self, results w/o predictions. If left as empty, no file will be saved. Defaults to ''. window_name (str, optional): Title of display window. - window_close_event_handler (callable, optional): Returns: List[np.ndarray]: Visualization results. @@ -329,10 +330,10 @@ def visualize(self, pred, draw_gt=False, draw_bbox=draw_bbox, - draw_heatmap=True, show=show, wait_time=wait_time, - kpt_thr=kpt_thr) + kpt_thr=kpt_thr, + **kwargs) results.append(visualization) if vis_out_dir: diff --git a/mmpose/apis/inferencers/mmpose_inferencer.py b/mmpose/apis/inferencers/mmpose_inferencer.py index d7050272f6..916f83889a 100644 --- a/mmpose/apis/inferencers/mmpose_inferencer.py +++ b/mmpose/apis/inferencers/mmpose_inferencer.py @@ -60,14 +60,8 @@ class MMPoseInferencer(BaseMMPoseInferencer): } forward_kwargs: set = {'rebase_keypoint_height'} visualize_kwargs: set = { - 'return_vis', - 'show', - 'wait_time', - 'draw_bbox', - 'radius', - 'thickness', - 'kpt_thr', - 'vis_out_dir', + 'return_vis', 'show', 'wait_time', 'draw_bbox', 'radius', 'thickness', + 'kpt_thr', 'vis_out_dir', 'skeleton_style', 'draw_heatmap' } postprocess_kwargs: set = {'pred_out_dir'} @@ -80,8 +74,7 @@ def __init__(self, scope: str = 'mmpose', det_model: Optional[Union[ModelType, str]] = None, det_weights: Optional[str] = 
None, - det_cat_ids: Optional[Union[int, List]] = None, - output_heatmaps: Optional[bool] = None) -> None: + det_cat_ids: Optional[Union[int, List]] = None) -> None: self.visualizer = None if pose3d is not None: @@ -92,7 +85,7 @@ def __init__(self, elif pose2d is not None: self.inferencer = Pose2DInferencer(pose2d, pose2d_weights, device, scope, det_model, det_weights, - det_cat_ids, output_heatmaps) + det_cat_ids) else: raise ValueError('Either 2d or 3d pose estimation algorithm ' 'should be provided.') @@ -177,6 +170,8 @@ def __call__( postprocess_kwargs, ) = self._dispatch_kwargs(**kwargs) + self.inferencer.update_model_visualizer_settings(**kwargs) + # preprocessing if isinstance(inputs, str) and inputs.startswith('webcam'): inputs = self.inferencer._get_webcam_inputs(inputs) @@ -240,8 +235,4 @@ def visualize(self, inputs: InputsType, preds: PredType, window_name = self.inferencer.video_info['name'] return self.inferencer.visualize( - inputs, - preds, - window_name=window_name, - window_close_event_handler=self._visualization_window_on_close, - **kwargs) + inputs, preds, window_name=window_name, **kwargs) diff --git a/mmpose/apis/inferencers/pose2d_inferencer.py b/mmpose/apis/inferencers/pose2d_inferencer.py index 1e8e8d7550..3ac923e9f0 100644 --- a/mmpose/apis/inferencers/pose2d_inferencer.py +++ b/mmpose/apis/inferencers/pose2d_inferencer.py @@ -60,9 +60,6 @@ class Pose2DInferencer(BaseMMPoseInferencer): model. Defaults to None. det_cat_ids (int or list[int], optional): Category id for detection model. Defaults to None. - output_heatmaps (bool, optional): Flag to visualize predicted - heatmaps. If set to None, the default setting from the model - config will be used. Default is None. 
""" preprocess_kwargs: set = {'bbox_thr', 'nms_thr', 'bboxes'} @@ -76,6 +73,8 @@ class Pose2DInferencer(BaseMMPoseInferencer): 'thickness', 'kpt_thr', 'vis_out_dir', + 'skeleton_style', + 'draw_heatmap', } postprocess_kwargs: set = {'pred_out_dir'} @@ -86,15 +85,12 @@ def __init__(self, scope: Optional[str] = 'mmpose', det_model: Optional[Union[ModelType, str]] = None, det_weights: Optional[str] = None, - det_cat_ids: Optional[Union[int, Tuple]] = None, - output_heatmaps: Optional[bool] = None) -> None: + det_cat_ids: Optional[Union[int, Tuple]] = None) -> None: init_default_scope(scope) super().__init__( model=model, weights=weights, device=device, scope=scope) self.model = revert_sync_batchnorm(self.model) - if output_heatmaps is not None: - self.model.test_cfg['output_heatmaps'] = output_heatmaps # assign dataset metainfo to self.visualizer self.visualizer.set_dataset_meta(self.model.dataset_meta) @@ -134,6 +130,30 @@ def __init__(self, self._video_input = False + def update_model_visualizer_settings(self, + draw_heatmap: bool = False, + skeleton_style: str = 'mmpose', + **kwargs) -> None: + """Update the settings of models and visualizer according to inference + arguments. + + Args: + draw_heatmaps (bool, optional): Flag to visualize predicted + heatmaps. If not provided, it defaults to False. + skeleton_style (str, optional): Skeleton style selection. Valid + options are 'mmpose' and 'openpose'. Defaults to 'mmpose'. 
+ """ + self.model.test_cfg['output_heatmaps'] = draw_heatmap + + if skeleton_style not in ['mmpose', 'openpose']: + raise ValueError('`skeleton_style` must be either \'mmpose\' ' + 'or \'openpose\'') + + if skeleton_style == 'openpose': + self.visualizer.set_dataset_meta(self.model.dataset_meta, + skeleton_style) + self.visualizer.backend = 'matplotlib' + def preprocess_single(self, input: InputType, index: int, @@ -274,6 +294,8 @@ def __call__( postprocess_kwargs, ) = self._dispatch_kwargs(**kwargs) + self.update_model_visualizer_settings(**kwargs) + # preprocessing if isinstance(inputs, str) and inputs.startswith('webcam'): inputs = self._get_webcam_inputs(inputs) diff --git a/mmpose/apis/inferencers/pose3d_inferencer.py b/mmpose/apis/inferencers/pose3d_inferencer.py index d30302cfa2..d5b2a2998d 100644 --- a/mmpose/apis/inferencers/pose3d_inferencer.py +++ b/mmpose/apis/inferencers/pose3d_inferencer.py @@ -384,6 +384,8 @@ def __call__( postprocess_kwargs, ) = self._dispatch_kwargs(**kwargs) + self.update_model_visualizer_settings(**kwargs) + # preprocessing if isinstance(inputs, str) and inputs.startswith('webcam'): inputs = self._get_webcam_inputs(inputs) From 4e38bb575ea1b20d874e4b3ed2fbce56618337cb Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Wed, 14 Jun 2023 17:33:49 +0800 Subject: [PATCH 26/52] [Feature] Support black background (#2458) --- configs/_base_/datasets/coco_openpose.py | 70 +++++++++---------- demo/inferencer_demo.py | 4 ++ .../inferencers/base_mmpose_inferencer.py | 15 ++-- mmpose/apis/inferencers/mmpose_inferencer.py | 3 +- mmpose/apis/inferencers/pose2d_inferencer.py | 2 +- mmpose/visualization/local_visualizer.py | 10 +-- .../opencv_backend_visualizer.py | 34 +++++++-- 7 files changed, 81 insertions(+), 57 deletions(-) diff --git a/configs/_base_/datasets/coco_openpose.py b/configs/_base_/datasets/coco_openpose.py index 9aedd9f0e4..cce11b27f1 100644 --- a/configs/_base_/datasets/coco_openpose.py +++ b/configs/_base_/datasets/coco_openpose.py 
@@ -12,77 +12,77 @@ ), keypoint_info={ 0: - dict(name='nose', id=0, color=[255, 0, 85], type='upper', swap=''), + dict(name='nose', id=0, color=[255, 0, 0], type='upper', swap=''), 1: - dict(name='neck', id=1, color=[255, 0, 0], type='upper', swap=''), + dict(name='neck', id=1, color=[255, 85, 0], type='upper', swap=''), 2: dict( name='right_shoulder', id=2, - color=[255, 85, 0], + color=[255, 170, 0], type='upper', swap='left_shoulder'), 3: dict( name='right_elbow', id=3, - color=[255, 170, 0], + color=[255, 255, 0], type='upper', swap='left_elbow'), 4: dict( name='right_wrist', id=4, - color=[255, 255, 0], + color=[170, 255, 0], type='upper', swap='left_wrist'), 5: dict( name='left_shoulder', id=5, - color=[170, 255, 0], + color=[85, 255, 0], type='upper', swap='right_shoulder'), 6: dict( name='left_elbow', id=6, - color=[85, 255, 0], + color=[0, 255, 0], type='upper', swap='right_elbow'), 7: dict( name='left_wrist', id=7, - color=[0, 255, 0], + color=[0, 255, 85], type='upper', swap='right_wrist'), 8: dict( name='right_hip', id=8, - color=[255, 0, 170], + color=[0, 255, 170], type='lower', swap='left_hip'), 9: dict( name='right_knee', id=9, - color=[255, 0, 255], + color=[0, 255, 255], type='lower', swap='left_knee'), 10: dict( name='right_ankle', id=10, - color=[170, 0, 255], + color=[0, 170, 255], type='lower', swap='left_ankle'), 11: dict( name='left_hip', id=11, - color=[85, 255, 0], + color=[0, 85, 255], type='lower', swap='right_hip'), 12: @@ -96,59 +96,59 @@ dict( name='left_ankle', id=13, - color=[0, 85, 255], + color=[85, 0, 255], type='lower', swap='right_ankle'), 14: dict( name='right_eye', id=14, - color=[0, 255, 170], + color=[170, 0, 255], type='upper', swap='left_eye'), 15: dict( name='left_eye', id=15, - color=[0, 255, 255], + color=[255, 0, 255], type='upper', swap='right_eye'), 16: dict( name='right_ear', id=16, - color=[0, 170, 255], + color=[255, 0, 170], type='upper', swap='left_ear'), 17: dict( name='left_ear', id=17, - color=[0, 170, 255], 
+ color=[255, 0, 85], type='upper', swap='right_ear'), }, skeleton_info={ - 0: dict(link=('neck', 'right_shoulder'), id=0, color=[255, 0, 85]), - 1: dict(link=('neck', 'left_shoulder'), id=1, color=[255, 0, 0]), - 2: - dict(link=('right_shoulder', 'right_elbow'), id=2, color=[255, 85, 0]), + 0: dict(link=('neck', 'right_shoulder'), id=0, color=[255, 0, 0]), + 1: dict(link=('neck', 'left_shoulder'), id=1, color=[255, 85, 0]), + 2: dict( + link=('right_shoulder', 'right_elbow'), id=2, color=[255, 170, 0]), 3: - dict(link=('right_elbow', 'right_wrist'), id=3, color=[255, 170, 0]), + dict(link=('right_elbow', 'right_wrist'), id=3, color=[255, 255, 0]), 4: - dict(link=('left_shoulder', 'left_elbow'), id=4, color=[255, 255, 0]), - 5: dict(link=('left_elbow', 'left_wrist'), id=5, color=[170, 255, 0]), - 6: dict(link=('neck', 'right_hip'), id=6, color=[85, 255, 0]), - 7: dict(link=('right_hip', 'right_knee'), id=7, color=[0, 255, 0]), - 8: dict(link=('right_knee', 'right_ankle'), id=8, color=[0, 255, 85]), - 9: dict(link=('neck', 'left_hip'), id=9, color=[0, 255, 170]), - 10: dict(link=('left_hip', 'left_knee'), id=10, color=[0, 255, 225]), - 11: dict(link=('left_knee', 'left_ankle'), id=11, color=[0, 170, 255]), - 12: dict(link=('neck', 'nose'), id=12, color=[0, 85, 255]), - 13: dict(link=('nose', 'right_eye'), id=13, color=[0, 0, 255]), - 14: dict(link=('right_eye', 'right_ear'), id=14, color=[255, 0, 170]), - 15: dict(link=('nose', 'left_eye'), id=15, color=[170, 0, 255]), - 16: dict(link=('left_eye', 'left_ear'), id=16, color=[255, 0, 255]), + dict(link=('left_shoulder', 'left_elbow'), id=4, color=[170, 255, 0]), + 5: dict(link=('left_elbow', 'left_wrist'), id=5, color=[85, 255, 0]), + 6: dict(link=('neck', 'right_hip'), id=6, color=[0, 255, 0]), + 7: dict(link=('right_hip', 'right_knee'), id=7, color=[0, 255, 85]), + 8: dict(link=('right_knee', 'right_ankle'), id=8, color=[0, 255, 170]), + 9: dict(link=('neck', 'left_hip'), id=9, color=[0, 255, 225]), + 10: 
dict(link=('left_hip', 'left_knee'), id=10, color=[0, 170, 255]), + 11: dict(link=('left_knee', 'left_ankle'), id=11, color=[0, 85, 255]), + 12: dict(link=('neck', 'nose'), id=12, color=[0, 0, 255]), + 13: dict(link=('nose', 'right_eye'), id=13, color=[255, 0, 170]), + 14: dict(link=('right_eye', 'right_ear'), id=14, color=[170, 0, 255]), + 15: dict(link=('nose', 'left_eye'), id=15, color=[255, 0, 255]), + 16: dict(link=('left_eye', 'left_ear'), id=16, color=[255, 0, 170]), }, joint_weights=[1.] * 18, sigmas=[ diff --git a/demo/inferencer_demo.py b/demo/inferencer_demo.py index 348eea05d5..b91e91f74b 100644 --- a/demo/inferencer_demo.py +++ b/demo/inferencer_demo.py @@ -126,6 +126,10 @@ def parse_args(): type=str, choices=['mmpose', 'openpose'], help='Skeleton style selection') + parser.add_argument( + '--black-background', + action='store_true', + help='Plot predictions on a black image') parser.add_argument( '--vis-out-dir', type=str, diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index bb1590dc27..985c775967 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -39,14 +39,8 @@ class BaseMMPoseInferencer(BaseInferencer): preprocess_kwargs: set = {'bbox_thr', 'nms_thr', 'bboxes'} forward_kwargs: set = set() visualize_kwargs: set = { - 'return_vis', - 'show', - 'wait_time', - 'draw_bbox', - 'radius', - 'thickness', - 'kpt_thr', - 'vis_out_dir', + 'return_vis', 'show', 'wait_time', 'draw_bbox', 'radius', 'thickness', + 'kpt_thr', 'vis_out_dir', 'black_background' } postprocess_kwargs: set = {'pred_out_dir'} @@ -271,6 +265,7 @@ def visualize(self, kpt_thr: float = 0.3, vis_out_dir: str = '', window_name: str = '', + black_background: bool = False, **kwargs) -> List[np.ndarray]: """Visualize predictions. @@ -291,6 +286,8 @@ def visualize(self, results w/o predictions. If left as empty, no file will be saved. Defaults to ''. 
window_name (str, optional): Title of display window. + black_background (bool, optional): Whether to plot keypoints on a + black image instead of the input image. Defaults to False. Returns: List[np.ndarray]: Visualization results. @@ -315,6 +312,8 @@ def visualize(self, else: raise ValueError('Unsupported input type: ' f'{type(single_input)}') + if black_background: + img = img * 0 img_name = os.path.basename(pred.metainfo['img_path']) window_name = window_name if window_name else img_name diff --git a/mmpose/apis/inferencers/mmpose_inferencer.py b/mmpose/apis/inferencers/mmpose_inferencer.py index 916f83889a..b44361bba8 100644 --- a/mmpose/apis/inferencers/mmpose_inferencer.py +++ b/mmpose/apis/inferencers/mmpose_inferencer.py @@ -61,7 +61,8 @@ class MMPoseInferencer(BaseMMPoseInferencer): forward_kwargs: set = {'rebase_keypoint_height'} visualize_kwargs: set = { 'return_vis', 'show', 'wait_time', 'draw_bbox', 'radius', 'thickness', - 'kpt_thr', 'vis_out_dir', 'skeleton_style', 'draw_heatmap' + 'kpt_thr', 'vis_out_dir', 'skeleton_style', 'draw_heatmap', + 'black_background' } postprocess_kwargs: set = {'pred_out_dir'} diff --git a/mmpose/apis/inferencers/pose2d_inferencer.py b/mmpose/apis/inferencers/pose2d_inferencer.py index 3ac923e9f0..3f1f20fdc0 100644 --- a/mmpose/apis/inferencers/pose2d_inferencer.py +++ b/mmpose/apis/inferencers/pose2d_inferencer.py @@ -75,6 +75,7 @@ class Pose2DInferencer(BaseMMPoseInferencer): 'vis_out_dir', 'skeleton_style', 'draw_heatmap', + 'black_background', } postprocess_kwargs: set = {'pred_out_dir'} @@ -152,7 +153,6 @@ def update_model_visualizer_settings(self, if skeleton_style == 'openpose': self.visualizer.set_dataset_meta(self.model.dataset_meta, skeleton_style) - self.visualizer.backend = 'matplotlib' def preprocess_single(self, input: InputType, diff --git a/mmpose/visualization/local_visualizer.py b/mmpose/visualization/local_visualizer.py index 205993c006..080e628e33 100644 --- a/mmpose/visualization/local_visualizer.py 
+++ b/mmpose/visualization/local_visualizer.py @@ -64,7 +64,7 @@ class PoseLocalVisualizer(OpencvBackendVisualizer): radius (int, float): The radius of keypoints. Defaults to 4 show_keypoint_weight (bool): Whether to adjust the transparency of keypoints according to their score. Defaults to ``False`` - alpha (int, float): The transparency of bboxes. Defaults to ``0.8`` + alpha (int, float): The transparency of bboxes. Defaults to ``1.0`` Examples: >>> import numpy as np @@ -116,7 +116,7 @@ def __init__(self, radius: Union[int, float] = 3, show_keypoint_weight: bool = False, backend: str = 'opencv', - alpha: float = 0.8): + alpha: float = 1.0): super().__init__( name=name, image=image, @@ -345,13 +345,13 @@ def _draw_instances_kpts(self, mX = np.mean(X) mY = np.mean(Y) length = ((Y[0] - Y[1])**2 + (X[0] - X[1])**2)**0.5 + transparency = 0.6 angle = math.degrees( math.atan2(Y[0] - Y[1], X[0] - X[1])) - stickwidth = 2 polygons = cv2.ellipse2Poly( (int(mX), int(mY)), - (int(length / 2), int(stickwidth)), int(angle), - 0, 360, 1) + (int(length / 2), int(self.line_width)), + int(angle), 0, 360, 1) self.draw_polygons( polygons, diff --git a/mmpose/visualization/opencv_backend_visualizer.py b/mmpose/visualization/opencv_backend_visualizer.py index 66a7731c76..1c17506640 100644 --- a/mmpose/visualization/opencv_backend_visualizer.py +++ b/mmpose/visualization/opencv_backend_visualizer.py @@ -26,6 +26,7 @@ class OpencvBackendVisualizer(Visualizer): Defaults to empty dict. backend (str): Backend used to draw elements on the image and display the image. Defaults to 'matplotlib'. + alpha (int, float): The transparency of bboxes. Defaults to ``1.0`` """ def __init__(self, @@ -87,6 +88,7 @@ def draw_circles(self, radius: Union[np.ndarray, torch.Tensor], face_colors: Union[str, tuple, List[str], List[tuple]] = 'none', + alpha: float = 1.0, **kwargs) -> 'Visualizer': """Draw single or multiple circles. 
@@ -123,13 +125,22 @@ def draw_circles(self, center=center, radius=radius, face_colors=face_colors, + alpha=alpha, **kwargs) elif self.backend == 'opencv': if isinstance(face_colors, str): face_colors = mmcv.color_val(face_colors) - self._image = cv2.circle(self._image, - (int(center[0]), int(center[1])), - int(radius), face_colors, -1) + + if alpha == 1.0: + self._image = cv2.circle(self._image, + (int(center[0]), int(center[1])), + int(radius), face_colors, -1) + else: + img = cv2.circle(self._image.copy(), + (int(center[0]), int(center[1])), int(radius), + face_colors, -1) + self._image = cv2.addWeighted(self._image, 1 - alpha, img, + alpha, 0) else: raise ValueError(f'got unsupported backend {self.backend}') @@ -362,6 +373,7 @@ def draw_polygons(self, List[Union[np.ndarray, torch.Tensor]]], edge_colors: Union[str, tuple, List[str], List[tuple]] = 'g', + alpha: float = 1.0, **kwargs) -> 'Visualizer': """Draw single or multiple bboxes. @@ -394,12 +406,20 @@ def draw_polygons(self, """ if self.backend == 'matplotlib': super().draw_polygons( - polygons=polygons, edge_colors=edge_colors, **kwargs) + polygons=polygons, + edge_colors=edge_colors, + alpha=alpha, + **kwargs) elif self.backend == 'opencv': - - self._image = cv2.fillConvexPoly(self._image, polygons, - edge_colors) + if alpha == 1.0: + self._image = cv2.fillConvexPoly(self._image, polygons, + edge_colors) + else: + img = cv2.fillConvexPoly(self._image.copy(), polygons, + edge_colors) + self._image = cv2.addWeighted(self._image, 1 - alpha, img, + alpha, 0) else: raise ValueError(f'got unsupported backend {self.backend}') From c5e9378f8fde9b7378d387d44a0741a7f0f64e9c Mon Sep 17 00:00:00 2001 From: CescMessi Date: Thu, 15 Jun 2023 14:25:17 +0800 Subject: [PATCH 27/52] [Feature] Add tool for converting label studio json result to coco format (#2385) --- docs/en/dataset_zoo/dataset_tools.md | 35 +++ docs/en/dataset_zoo/label_studio.md | 76 ++++++ docs/zh_cn/dataset_zoo/dataset_tools.md | 35 +++ 
docs/zh_cn/dataset_zoo/label_studio.md | 76 ++++++ tools/dataset_converters/labelstudio2coco.py | 249 +++++++++++++++++++ 5 files changed, 471 insertions(+) create mode 100644 docs/en/dataset_zoo/label_studio.md create mode 100644 docs/zh_cn/dataset_zoo/label_studio.md create mode 100755 tools/dataset_converters/labelstudio2coco.py diff --git a/docs/en/dataset_zoo/dataset_tools.md b/docs/en/dataset_zoo/dataset_tools.md index 3ff70fc401..44a7c96b2b 100644 --- a/docs/en/dataset_zoo/dataset_tools.md +++ b/docs/en/dataset_zoo/dataset_tools.md @@ -361,3 +361,38 @@ For example, ```shell python tools/dataset/mat2json work_dirs/res50_mpii_256x256/pred.mat data/mpii/annotations/mpii_val.json pred.json ``` + +## Label Studio + +
    +Label Studio + +```bibtex +@misc{Label Studio, + title={{Label Studio}: Data labeling software}, + url={https://github.com/heartexlabs/label-studio}, + note={Open source software available from https://github.com/heartexlabs/label-studio}, + author={ + Maxim Tkachenko and + Mikhail Malyuk and + Andrey Holmanyuk and + Nikolai Liubimov}, + year={2020-2022}, +} +``` + +
    + +For users of [Label Studio](https://github.com/heartexlabs/label-studio/), please follow the instructions in the [Label Studio to COCO document](./label_studio.md) to annotate and export the results as a Label Studio `.json` file. And save the `Code` from the `Labeling Interface` as an `.xml` file. + +We provide a script to convert Label Studio `.json` annotation file to COCO `.json` format file. It can be used by running the following command: + +```shell +python tools/dataset_converters/labelstudio2coco.py ${LS_JSON_FILE} ${LS_XML_FILE} ${OUTPUT_COCO_JSON_FILE} +``` + +For example, + +```shell +python tools/dataset_converters/labelstudio2coco.py config.xml project-1-at-2023-05-13-09-22-91b53efa.json output/result.json +``` diff --git a/docs/en/dataset_zoo/label_studio.md b/docs/en/dataset_zoo/label_studio.md new file mode 100644 index 0000000000..3b499e05c6 --- /dev/null +++ b/docs/en/dataset_zoo/label_studio.md @@ -0,0 +1,76 @@ +# Label Studio Annotations to COCO Script + +[Label Studio](https://labelstud.io/) is a popular deep learning annotation tool that can be used for annotating various tasks. However, for keypoint annotation, Label Studio can not directly export to the COCO format required by MMPose. This article will explain how to use Label Studio to annotate keypoint data and convert it into the required COCO format using the [labelstudio2coco.py](../../../tools/dataset_converters/labelstudio2coco.py) tool. + +## Label Studio Annotation Requirements + +According to the COCO format requirements, each annotated instance needs to include information about keypoints, segmentation, and bounding box (bbox). However, Label Studio scatters this information across different instances during annotation. Therefore, certain rules need to be followed during annotation to ensure proper usage with the subsequent scripts. + +1. Label Interface Setup + +For a newly created Label Studio project, the label interface needs to be set up. 
There should be three types of annotations: `KeyPointLabels`, `PolygonLabels`, and `RectangleLabels`, which correspond to `keypoints`, `segmentation`, and `bbox` in the COCO format, respectively. The following is an example of a label interface. You can find the `Labeling Interface` in the project's `Settings`, click on `Code`, and paste the following example. + +```xml + + + + + + + + + +``` + +2. Annotation Order + +Since it is necessary to combine annotations of different types into one instance, a specific order of annotation is required to determine whether the annotations belong to the same instance. Annotations should be made in the order of `KeyPointLabels` -> `PolygonLabels`/`RectangleLabels`. The order and number of `KeyPointLabels` should match the order and number of keypoints specified in the `dataset_info` in MMPose configuration file. The annotation order of `PolygonLabels` and `RectangleLabels` can be interchangeable, and only one of them needs to be annotated. The annotation should be within one instance starts with keypoints and ends with non-keypoints. The following image shows an annotation example: + +*Note: The bbox and area will be calculated based on the later PolygonLabels/RectangleLabels. If you annotate PolygonLabels first, the bbox will be based on the range of the later RectangleLabels, and the area will be equal to the area of the rectangle. Conversely, they will be based on the minimum bounding rectangle of the polygon and the area of the polygon.* + +![image](https://github.com/open-mmlab/mmpose/assets/15847281/b2d004d0-8361-42c5-9180-cfbac0373a94) + +3. Exporting Annotations + +Once the annotations are completed as described above, they need to be exported. Select the `Export` button on the project interface, choose the `JSON` format, and click `Export` to download the JSON file containing the labels. + +*Note: The exported file only contains the labels and does not include the original images. 
Therefore, the corresponding annotated images need to be provided separately. It is not recommended to use directly uploaded files because Label Studio truncates long filenames. Instead, use the export COCO format tool available in the `Export` functionality, which includes a folder with the image files within the downloaded compressed package.*
+
+![image](https://github.com/open-mmlab/mmpose/assets/15847281/9f54ca3d-8cdd-4d7f-8ed6-494badcfeaf2)
+
+## Usage of the Conversion Tool Script
+
+The conversion tool script is located at `tools/dataset_converters/labelstudio2coco.py` and can be used as follows:
+
+```bash
+python tools/dataset_converters/labelstudio2coco.py config.xml project-1-at-2023-05-13-09-22-91b53efa.json output/result.json
+```
+
+Where `config.xml` contains the code from the Labeling Interface mentioned earlier, `project-1-at-2023-05-13-09-22-91b53efa.json` is the JSON file exported from Label Studio, and `output/result.json` is the path to the resulting JSON file in COCO format. If the path does not exist, the script will create it automatically.
+
+Afterward, place the image folder in the output directory to complete the conversion of the COCO dataset. The directory structure can be as follows:
+
+```bash
+.
+├── images +│   ├── 38b480f2.jpg +│   └── aeb26f04.jpg +└── result.json + +``` + +If you want to use this dataset in MMPose, you can make modifications like the following example: + +```python +dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='result.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, +) +``` diff --git a/docs/zh_cn/dataset_zoo/dataset_tools.md b/docs/zh_cn/dataset_zoo/dataset_tools.md index ab30fc5604..a2e6d01d97 100644 --- a/docs/zh_cn/dataset_zoo/dataset_tools.md +++ b/docs/zh_cn/dataset_zoo/dataset_tools.md @@ -376,3 +376,38 @@ python tools/dataset_converters/mat2json ${PRED_MAT_FILE} ${GT_JSON_FILE} ${OUTP ```shell python tools/dataset/mat2json work_dirs/res50_mpii_256x256/pred.mat data/mpii/annotations/mpii_val.json pred.json ``` + +## Label Studio 数据集 + +
    +Label Studio + +```bibtex +@misc{Label Studio, + title={{Label Studio}: Data labeling software}, + url={https://github.com/heartexlabs/label-studio}, + note={Open source software available from https://github.com/heartexlabs/label-studio}, + author={ + Maxim Tkachenko and + Mikhail Malyuk and + Andrey Holmanyuk and + Nikolai Liubimov}, + year={2020-2022}, +} +``` + +
    + +对于 [Label Studio](https://github.com/heartexlabs/label-studio/) 用户,请依照 [Label Studio 转换工具文档](./label_studio.md) 中的方法进行标注,并将结果导出为 Label Studio 标准的 `.json` 文件,将 `Labeling Interface` 中的 `Code` 保存为 `.xml` 文件。 + +我们提供了一个脚本来将 Label Studio 标准的 `.json` 格式标注文件转换为 COCO 标准的 `.json` 格式。这可以通过运行以下命令完成: + +```shell +python tools/dataset_converters/labelstudio2coco.py ${LS_JSON_FILE} ${LS_XML_FILE} ${OUTPUT_COCO_JSON_FILE} +``` + +例如: + +```shell +python tools/dataset_converters/labelstudio2coco.py config.xml project-1-at-2023-05-13-09-22-91b53efa.json output/result.json +``` diff --git a/docs/zh_cn/dataset_zoo/label_studio.md b/docs/zh_cn/dataset_zoo/label_studio.md new file mode 100644 index 0000000000..94cbd6418c --- /dev/null +++ b/docs/zh_cn/dataset_zoo/label_studio.md @@ -0,0 +1,76 @@ +# Label Studio 标注工具转COCO脚本 + +[Label Studio](https://labelstud.io/) 是一款广受欢迎的深度学习标注工具,可以对多种任务进行标注,然而对于关键点标注,Label Studio 无法直接导出成 MMPose 所需要的 COCO 格式。本文将介绍如何使用Label Studio 标注关键点数据,并利用 [labelstudio2coco.py](../../../tools/dataset_converters/labelstudio2coco.py) 工具将其转换为训练所需的格式。 + +## Label Studio 标注要求 + +根据 COCO 格式的要求,每个标注的实例中都需要包含关键点、分割和 bbox 的信息,然而 Label Studio 在标注时会将这些信息分散在不同的实例中,因此需要按一定规则进行标注,才能正常使用后续的脚本。 + +1. 标签接口设置 + +对于一个新建的 Label Studio 项目,首先要设置它的标签接口。这里需要有三种类型的标注:`KeyPointLabels`、`PolygonLabels`、`RectangleLabels`,分别对应 COCO 格式中的`keypoints`、`segmentation`、`bbox`。以下是一个标签接口的示例,可以在项目的`Settings`中找到`Labeling Interface`,点击`Code`,粘贴使用该示例。 + +```xml + + + + + + + + + +``` + +2. 
标注顺序 + +由于需要将多个标注实例中的不同类型标注组合到一个实例中,因此采取了按特定顺序标注的方式,以此来判断各标注是否位于同一个实例。标注时须按照 **KeyPointLabels -> PolygonLabels/RectangleLabels** 的顺序标注,其中 KeyPointLabels 的顺序和数量要与 MMPose 配置文件中的`dataset_info`的关键点顺序和数量一致, PolygonLabels 和 RectangleLabels 的标注顺序可以互换,且可以只标注其中一个,只要保证一个实例的标注中,以关键点开始,以非关键点结束即可。下图为标注的示例: + +*注:bbox 和 area 会根据靠后的 PolygonLabels/RectangleLabels 来计算,如若先标 PolygonLabels,那么bbox会是靠后的 RectangleLabels 的范围,面积为矩形的面积,反之则是多边形外接矩形和多边形的面积* + +![image](https://github.com/open-mmlab/mmpose/assets/15847281/b2d004d0-8361-42c5-9180-cfbac0373a94) + +3. 导出标注 + +上述标注完成后,需要将标注进行导出。选择项目界面的`Export`按钮,选择`JSON`格式,再点击`Export`即可下载包含标签的 JSON 格式文件。 + +*注:上述文件中仅仅包含标签,不包含原始图片,因此需要额外提供标注对应的图片。由于 Label Studio 会对过长的文件名进行截断,因此不建议直接使用上传的文件,而是使用`Export`功能中的导出 COCO 格式工具,使用压缩包内的图片文件夹。* + +![image](https://github.com/open-mmlab/mmpose/assets/15847281/9f54ca3d-8cdd-4d7f-8ed6-494badcfeaf2) + +## 转换工具脚本的使用 + +转换工具脚本位于`tools/dataset_converters/labelstudio2coco.py`,使用方式如下: + +```bash +python tools/dataset_converters/labelstudio2coco.py config.xml project-1-at-2023-05-13-09-22-91b53efa.json output/result.json +``` + +其中`config.xml`的内容为标签接口设置中提到的`Labeling Interface`中的`Code`,`project-1-at-2023-05-13-09-22-91b53efa.json`即为导出标注时导出的 Label Studio 格式的 JSON 文件,`output/result.json`为转换后得到的 COCO 格式的 JSON 文件路径,若路径不存在,该脚本会自动创建路径。 + +随后,将图片的文件夹放置在输出目录下,即可完成 COCO 数据集的转换。目录结构示例如下: + +```bash +. 
+├── images +│   ├── 38b480f2.jpg +│   └── aeb26f04.jpg +└── result.json + +``` + +若想在 MMPose 中使用该数据集,可以进行类似如下的修改: + +```python +dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='result.json', + data_prefix=dict(img='images/'), + pipeline=train_pipeline, +) +``` diff --git a/tools/dataset_converters/labelstudio2coco.py b/tools/dataset_converters/labelstudio2coco.py new file mode 100755 index 0000000000..12f4c61851 --- /dev/null +++ b/tools/dataset_converters/labelstudio2coco.py @@ -0,0 +1,249 @@ +# ----------------------------------------------------------------------------- +# Based on https://github.com/heartexlabs/label-studio-converter +# Original license: Copyright (c) Heartex, under the Apache 2.0 License. +# ----------------------------------------------------------------------------- + +import argparse +import io +import json +import logging +import pathlib +import xml.etree.ElementTree as ET +from datetime import datetime + +import numpy as np + +logger = logging.getLogger(__name__) + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert Label Studio JSON file to COCO format JSON File') + parser.add_argument('config', help='Labeling Interface xml code file path') + parser.add_argument('input', help='Label Studio format JSON file path') + parser.add_argument('output', help='The output COCO format JSON file path') + args = parser.parse_args() + return args + + +class LSConverter: + + def __init__(self, config: str): + """Convert the Label Studio Format JSON file to COCO format JSON file + which is needed by mmpose. + + The annotations in label studio must follow the order: + keypoint 1, keypoint 2... keypoint n, rect of the instance, + polygon of the instance, + then annotations of the next instance. + Where the order of rect and polygon can be switched, + the bbox and area of the instance will be calculated with + the data behind. 
+ + Only annotating one of rect and polygon is also acceptable. + Args: + config (str): The annotations config xml file. + The xml content is from Project Setting -> + Label Interface -> Code. + Example: + ``` + + + + + + + + + + ``` + """ + # get label info from config file + tree = ET.parse(config) + root = tree.getroot() + labels = root.findall('.//KeyPointLabels/Label') + label_values = [label.get('value') for label in labels] + + self.categories = list() + self.category_name_to_id = dict() + for i, value in enumerate(label_values): + # category id start with 1 + self.categories.append({'id': i + 1, 'name': value}) + self.category_name_to_id[value] = i + 1 + + def convert_to_coco(self, input_json: str, output_json: str): + """Convert `input_json` to COCO format and save in `output_json`. + + Args: + input_json (str): The path of Label Studio format JSON file. + output_json (str): The path of the output COCO JSON file. + """ + + def add_image(images, width, height, image_id, image_path): + images.append({ + 'width': width, + 'height': height, + 'id': image_id, + 'file_name': image_path, + }) + return images + + output_path = pathlib.Path(output_json) + output_path.parent.mkdir(parents=True, exist_ok=True) + + images = list() + annotations = list() + + with open(input_json, 'r') as f: + ann_list = json.load(f) + + for item_idx, item in enumerate(ann_list): + # each image is an item + image_name = item['file_upload'] + image_id = len(images) + width, height = None, None + + # skip tasks without annotations + if not item['annotations']: + logger.warning('No annotations found for item #' + + str(item_idx)) + continue + + kp_num = 0 + for i, label in enumerate(item['annotations'][0]['result']): + category_name = None + + # valid label + for key in [ + 'rectanglelabels', 'polygonlabels', 'labels', + 'keypointlabels' + ]: + if key == label['type'] and len(label['value'][key]) > 0: + category_name = label['value'][key][0] + break + + if category_name is None: + 
logger.warning('Unknown label type or labels are empty') + continue + + if not height or not width: + if 'original_width' not in label or \ + 'original_height' not in label: + logger.debug( + f'original_width or original_height not found' + f'in {image_name}') + continue + + # get height and width info from annotations + width, height = label['original_width'], label[ + 'original_height'] + images = add_image(images, width, height, image_id, + image_name) + + category_id = self.category_name_to_id[category_name] + + annotation_id = len(annotations) + + if 'rectanglelabels' == label['type'] or 'labels' == label[ + 'type']: + + x = label['value']['x'] + y = label['value']['y'] + w = label['value']['width'] + h = label['value']['height'] + + x = x * label['original_width'] / 100 + y = y * label['original_height'] / 100 + w = w * label['original_width'] / 100 + h = h * label['original_height'] / 100 + + # rect annotation should be later than keypoints + annotations[-1]['bbox'] = [x, y, w, h] + annotations[-1]['area'] = w * h + annotations[-1]['num_keypoints'] = kp_num + + elif 'polygonlabels' == label['type']: + points_abs = [(x / 100 * width, y / 100 * height) + for x, y in label['value']['points']] + x, y = zip(*points_abs) + + x1, y1, x2, y2 = min(x), min(y), max(x), max(y) + + # calculate bbox and area from polygon's points + # which may be different with rect annotation + bbox = [x1, y1, x2 - x1, y2 - y1] + area = float(0.5 * np.abs( + np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))) + + # polygon label should be later than keypoints + annotations[-1]['segmentation'] = [[ + coord for point in points_abs for coord in point + ]] + annotations[-1]['bbox'] = bbox + annotations[-1]['area'] = area + annotations[-1]['num_keypoints'] = kp_num + + elif 'keypointlabels' == label['type']: + x = label['value']['x'] * label['original_width'] / 100 + y = label['value']['y'] * label['original_height'] / 100 + + # there is no method to annotate visible in Label Studio + # so 
the keypoints' visible code will be 2 except (0,0) + if x == y == 0: + current_kp = [x, y, 0] + kp_num_change = 0 + else: + current_kp = [x, y, 2] + kp_num_change = 1 + + # create new annotation in coco + # when the keypoint is the first point of an instance + if i == 0 or item['annotations'][0]['result'][ + i - 1]['type'] != 'keypointlabels': + annotations.append({ + 'id': annotation_id, + 'image_id': image_id, + 'category_id': category_id, + 'keypoints': current_kp, + 'ignore': 0, + 'iscrowd': 0, + }) + kp_num = kp_num_change + else: + annotations[-1]['keypoints'].extend(current_kp) + kp_num += kp_num_change + + with io.open(output_json, mode='w', encoding='utf8') as fout: + json.dump( + { + 'images': images, + 'categories': self.categories, + 'annotations': annotations, + 'info': { + 'year': datetime.now().year, + 'version': '1.0', + 'description': '', + 'contributor': 'Label Studio', + 'url': '', + 'date_created': str(datetime.now()), + }, + }, + fout, + indent=2, + ) + + +def main(): + args = parse_args() + config = args.config + input_json = args.input + output_json = args.output + converter = LSConverter(config) + converter.convert_to_coco(input_json, output_json) + + +if __name__ == '__main__': + main() From 96a35c990c5e4c2e409703d2e6c8ac54b25630b0 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Fri, 16 Jun 2023 10:18:50 +0800 Subject: [PATCH 28/52] [Enhance] Pose3d inferencer supports image inputs (#2460) --- docs/en/user_guides/inference.md | 77 ++++++++++++--- docs/zh_cn/user_guides/inference.md | 93 ++++++++++++++----- .../inferencers/base_mmpose_inferencer.py | 2 + mmpose/apis/inferencers/pose3d_inferencer.py | 61 +++++------- .../test_pose3d_inferencer.py | 35 +++++-- 5 files changed, 187 insertions(+), 81 deletions(-) diff --git a/docs/en/user_guides/inference.md b/docs/en/user_guides/inference.md index cfb960be3b..c9102132ac 100644 --- a/docs/en/user_guides/inference.md +++ b/docs/en/user_guides/inference.md @@ -20,7 +20,7 @@ from mmpose.apis import 
MMPoseInferencer img_path = 'tests/data/coco/000000000785.jpg' # replace this with your own image path -# create the inferencer using the model alias +# instantiate the inferencer using the model alias inferencer = MMPoseInferencer('human') # The MMPoseInferencer API employs a lazy inference approach, @@ -32,7 +32,46 @@ result = next(result_generator) If everything works fine, you will see the following image in a new window: ![inferencer_result_coco](https://user-images.githubusercontent.com/26127467/220008302-4a57fd44-0978-408e-8351-600e5513316a.jpg) -The variable `result` is a dictionary that contains two keys, `'visualization'` and `'predictions'`. The `'visualization'` key is meant to store visualization results, but since the `return_vis` argument wasn't specified, this list remains empty. The `'predictions'` key, however, holds a list of estimated keypoints for each detected instance. +The `result` variable is a dictionary comprising two keys, `'visualization'` and `'predictions'`. + +- `'visualization'` holds a list which: + + - contains visualization results, such as the input image, markers of the estimated poses, and optional predicted heatmaps. + - remains empty if the `return_vis` argument is not specified. + +- `'predictions'` stores: + + - a list of estimated keypoints for each identified instance. + +The structure of the `result` dictionary is as follows: + +```python +result = { + 'visualization': [ + # number of elements: batch_size (defaults to 1) + vis_image_1, + ... + ], + 'predictions': [ + # pose estimation result of each image + # number of elements: batch_size (defaults to 1) + [ + # pose information of each detected instance + # number of elements: number of detected instances + {'keypoints': ..., # instance 1 + 'keypoint_scores': ..., + ... + }, + {'keypoints': ..., # instance 2 + 'keypoint_scores': ..., + ... + }, + ] + ... 
+ ] +} + +``` A **command-line interface (CLI)** tool for the inferencer is also available: `demo/inferencer_demo.py`. This tool allows users to perform inference using the same model and inputs with the following command: @@ -175,24 +214,34 @@ The `MMPoseInferencer` offers a variety of arguments for customizing pose estima | ---------------- | ---------------------------------------------------------------------------------------------------------------- | | `pose2d` | Specifies the model alias, configuration file name, or configuration file path for the 2D pose estimation model. | | `pose2d_weights` | Specifies the URL or local path to the 2D pose estimation model's checkpoint file. | +| `pose3d` | Specifies the model alias, configuration file name, or configuration file path for the 3D pose estimation model. | +| `pose3d_weights` | Specifies the URL or local path to the 3D pose estimation model's checkpoint file. | | `det_model` | Specifies the model alias, configuration file name, or configuration file path for the object detection model. | | `det_weights` | Specifies the URL or local path to the object detection model's checkpoint file. | | `det_cat_ids` | Specifies the list of category IDs corresponding to the object classes to be detected. | | `device` | The device to perform the inference. If left `None`, the Inferencer will select the most suitable one. | | `scope` | The namespace where the model modules are defined. | -The inferencer is designed to handle both visualization and saving of predictions. Here is a list of arguments available when performing inference with the `MMPoseInferencer`: - -| Argument | Description | -| ------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -| `show` | Determines whether the image or video should be displayed in a pop-up window. | -| `radius` | Sets the keypoint radius for visualization. 
| -| `thickness` | Sets the link thickness for visualization. | -| `return_vis` | Determines whether visualization images should be included in the results. | -| `vis_out_dir` | Specifies the folder path for saving the visualization images. If not set, the visualization images will not be saved. | -| `return_datasample` | Determines whether to return the prediction in the format of `PoseDataSample`. | -| `pred_out_dir` | Specifies the folder path for saving the predictions. If not set, the predictions will not be saved. | -| `out_dir` | If `vis_out_dir` or `pred_out_dir` is not set, the values will be set to `f'{out_dir}/visualization'` or `f'{out_dir}/predictions'`, respectively. | +The inferencer is designed for both visualization and saving predictions. The table below presents the list of arguments available when using the `MMPoseInferencer` for inference, along with their compatibility with 2D and 3D inferencing: + +| Argument | Description | 2D | 3D | +| ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | --- | --- | +| `show` | Controls the display of the image or video in a pop-up window. | ✔️ | ✔️ | +| `radius` | Sets the visualization keypoint radius. | ✔️ | ✔️ | +| `thickness` | Determines the link thickness for visualization. | ✔️ | ✔️ | +| `kpt_thr` | Sets the keypoint score threshold. Keypoints with scores exceeding this threshold will be displayed. | ✔️ | ✔️ | +| `draw_bbox` | Decides whether to display the bounding boxes of instances. | ✔️ | ✔️ | +| `draw_heatmap` | Decides if the predicted heatmaps should be drawn. | ✔️ | ❌ | +| `black_background` | Decides whether the estimated poses should be displayed on a black background. | ✔️ | ❌ | +| `skeleton_style` | Sets the skeleton style. Options include 'mmpose' (default) and 'openpose'. 
| ✔️ | ❌ | +| `use_oks_tracking` | Decides whether to use OKS as a similarity measure in tracking. | ❌ | ✔️ | +| `tracking_thr` | Sets the similarity threshold for tracking. | ❌ | ✔️ | +| `norm_pose_2d` | Decides whether to scale the bounding box to the dataset's average bounding box scale and relocate the bounding box to the dataset's average bounding box center. | ❌ | ✔️ | +| `return_vis` | Decides whether to include visualization images in the results. | ✔️ | ✔️ | +| `vis_out_dir` | Defines the folder path to save the visualization images. If unset, the visualization images will not be saved. | ✔️ | ✔️ | +| `return_datasample` | Determines if the prediction should be returned in the `PoseDataSample` format. | ✔️ | ✔️ | +| `pred_out_dir` | Specifies the folder path to save the predictions. If unset, the predictions will not be saved. | ✔️ | ✔️ | +| `out_dir` | If `vis_out_dir` or `pred_out_dir` is unset, these will be set to `f'{out_dir}/visualization'` or `f'{out_dir}/predictions'`, respectively. 
| ✔️ | ✔️ | ### Model Alias diff --git a/docs/zh_cn/user_guides/inference.md b/docs/zh_cn/user_guides/inference.md index 6de4139fe9..3b764d659c 100644 --- a/docs/zh_cn/user_guides/inference.md +++ b/docs/zh_cn/user_guides/inference.md @@ -31,9 +31,44 @@ result = next(result_generator) ![inferencer_result_coco](https://user-images.githubusercontent.com/26127467/220008302-4a57fd44-0978-408e-8351-600e5513316a.jpg) -在上述示例中,变量`result`是一个字典,包含两个键,分别是`visualization`和`predictions`。`visualization`用于存储可视化结果,但由于没有设定参数`return_vis`,因此该列表为空。但是`predictions`保存了每个检测到的实例的、估计得到的关键点列表。 +`result` 变量是一个包含两个键值 `'visualization'` 和 `'predictions'` 的字典。 -还可以使用用于用于推断的**命令行界面工具**(CLI, command-line interface):`demo/inferencer_demo.py`。这个工具允许用户使用以下命令使用相同的模型和输入执行推理: +- `'visualization'` 键对应的值是一个列表,该列表: + - 包含可视化结果,例如输入图像、估计姿态的标记,以及可选的预测热图。 + - 如果没有指定 `return_vis` 参数,该列表将保持为空。 +- `'predictions'` 键对应的值是: + - 一个包含每个检测实例的预估关键点的列表。 + +`result` 字典的结构如下所示: + +```python +result = { + 'visualization': [ + # 元素数量:batch_size(默认为1) + vis_image_1, + ... + ], + 'predictions': [ + # 每张图像的姿态估计结果 + # 元素数量:batch_size(默认为1) + [ + # 每个检测到的实例的姿态信息 + # 元素数量:检测到的实例数 + {'keypoints': ..., # 实例 1 + 'keypoint_scores': ..., + ... + }, + {'keypoints': ..., # 实例 2 + 'keypoint_scores': ..., + ... + }, + ] + ... 
+ ] +} +``` + +还可以使用用于用于推断的**命令行界面工具**(CLI, command-line interface): `demo/inferencer_demo.py`。这个工具允许用户使用以下命令使用相同的模型和输入执行推理: ```python python demo/inferencer_demo.py 'tests/data/coco/000000000785.jpg' \ @@ -163,28 +198,38 @@ result = next(result_generator) `MMPoseInferencer`提供了各种自定义姿态估计、可视化和保存预测结果的参数。下面是初始化推断器时可用的参数列表及对这些参数的描述: -| Argument | Description | -| ---------------- | ---------------------------------------------------------- | -| `pose2d` | 指定2D姿态估计模型的模型别名、配置文件名称或配置文件路径。 | -| `pose2d_weights` | 指定2D姿态估计模型权重文件的URL或本地路径。 | -| `det_model` | 指定对象检测模型的模型别名、配置文件名或配置文件路径。 | -| `det_weights` | 指定对象检测模型权重文件的URL或本地路径。 | -| `det_cat_ids` | 指定与要检测的对象类对应的类别id列表。 | -| `device` | 执行推理的设备。如果为`None`,推理器将选择最合适的一个。 | -| `scope` | 定义模型模块的名称空间 | - -推理器设计用于处理预测的可视化和保存。下面是使用`MMPoseInferencer`执行推理时可用的参数列表: - -| Argument | Description | -| ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `show` | 确定图像或视频的预测结果是否应在弹出窗口中显示。 | -| `radius` | 设置关键点半径。 | -| `thickness` | 设置骨架(线条)粗细。 | -| `return_vis` | 确定返回结果`result`中是否应包括可视化结果列表`visualization`。 | -| `vis_out_dir` | 指定保存可视化图像的文件夹路径。如果未设置,将不会保存可视化图像。 | -| `return_datasample` | 确定是否以`PoseDataSample`的形式返回预测。 | -| `pred_out_dir` | 指定保存预测结果`predictions`的文件夹路径。如果不设置,预测结果将不会被保存。 | -| `out_dir` | 如果指定了输出路径参数`out_dir`,但未设置`vis_out_dir`或`pred_out_dir`,则分别将`vis_out_dir`或`pred_out_dir`设置为`f'{out_dir}/visualization'`或` f'{out_dir}/ forecasts'`。 | +| Argument | Description | +| ---------------- | ------------------------------------------------------------ | +| `pose2d` | 指定 2D 姿态估计模型的模型别名、配置文件名称或配置文件路径。 | +| `pose2d_weights` | 指定 2D 姿态估计模型权重文件的URL或本地路径。 | +| `pose3d` | 指定 3D 姿态估计模型的模型别名、配置文件名称或配置文件路径。 | +| `pose3d_weights` | 指定 3D 姿态估计模型权重文件的URL或本地路径。 | +| `det_model` | 指定对象检测模型的模型别名、配置文件名或配置文件路径。 | +| `det_weights` | 指定对象检测模型权重文件的 URL 或本地路径。 | +| `det_cat_ids` | 指定与要检测的对象类对应的类别 id 列表。 | +| `device` 
| 执行推理的设备。如果为 `None`,推理器将选择最合适的一个。 | +| `scope` | 定义模型模块的名称空间 | + +推理器被设计用于可视化和保存预测。以下表格列出了在使用 `MMPoseInferencer` 进行推断时可用的参数列表,以及它们与 2D 和 3D 推理器的兼容性: + +| 参数 | 描述 | 2D | 3D | +| ------------------- | -------------------------------------------------------------------------------------------------------------------------- | --- | --- | +| `show` | 控制是否在弹出窗口中显示图像或视频。 | ✔️ | ✔️ | +| `radius` | 设置可视化关键点的半径。 | ✔️ | ✔️ | +| `thickness` | 确定可视化链接的厚度。 | ✔️ | ✔️ | +| `kpt_thr` | 设置关键点分数阈值。分数超过此阈值的关键点将被显示。 | ✔️ | ✔️ | +| `draw_bbox` | 决定是否显示实例的边界框。 | ✔️ | ✔️ | +| `draw_heatmap` | 决定是否绘制预测的热图。 | ✔️ | ❌ | +| `black_background` | 决定是否在黑色背景上显示预估的姿势。 | ✔️ | ❌ | +| `skeleton_style` | 设置骨架样式。可选项包括 'mmpose'(默认)和 'openpose'。 | ✔️ | ❌ | +| `use_oks_tracking` | 决定是否在追踪中使用OKS作为相似度测量。 | ❌ | ✔️ | +| `tracking_thr` | 设置追踪的相似度阈值。 | ❌ | ✔️ | +| `norm_pose_2d` | 决定是否将边界框缩放至数据集的平均边界框尺寸,并将边界框移至数据集的平均边界框中心。 | ❌ | ✔️ | +| `return_vis` | 决定是否在结果中包含可视化图像。 | ✔️ | ✔️ | +| `vis_out_dir` | 定义保存可视化图像的文件夹路径。如果未设置,将不保存可视化图像。 | ✔️ | ✔️ | +| `return_datasample` | 决定是否以 `PoseDataSample` 格式返回预测。 | ✔️ | ✔️ | +| `pred_out_dir` | 指定保存预测的文件夹路径。如果未设置,将不保存预测。 | ✔️ | ✔️ | +| `out_dir` | 如果 `vis_out_dir` 或 `pred_out_dir` 未设置,它们将分别设置为 `f'{out_dir}/visualization'` 或 `f'{out_dir}/predictions'`。 | ✔️ | ✔️ | ### 模型别名 diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index 985c775967..0ea6e9c156 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -125,6 +125,8 @@ def _inputs_to_list(self, inputs: InputsType) -> Iterable: fps=video.fps, name=os.path.basename(inputs), writer=None, + width=video.width, + height=video.height, predictions=[]) inputs = video elif input_type == 'image': diff --git a/mmpose/apis/inferencers/pose3d_inferencer.py b/mmpose/apis/inferencers/pose3d_inferencer.py index d5b2a2998d..a60bcb9d4b 100644 --- a/mmpose/apis/inferencers/pose3d_inferencer.py +++ 
b/mmpose/apis/inferencers/pose3d_inferencer.py @@ -1,11 +1,9 @@ # Copyright (c) OpenMMLab. All rights reserved. -import mimetypes import os import warnings from collections import defaultdict from functools import partial -from typing import (Callable, Dict, Iterable, List, Optional, Sequence, Tuple, - Union) +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union import cv2 import mmcv @@ -125,35 +123,6 @@ def __init__(self, self._video_input = False self._buffer = defaultdict(list) - def _inputs_to_list(self, inputs: InputsType) -> Iterable: - """Preprocess the inputs to a listaccording to its type - Args: - inputs (InputsType): Inputs for the inferencer. - - Returns: - list: List of input for the :meth:`preprocess`. - """ - self._video_input = False - - if isinstance(inputs, str) and not os.path.isdir(inputs) and \ - mimetypes.guess_type(inputs)[0].split('/')[0] == 'video': - - self._video_input = True - video = mmcv.VideoReader(inputs) - self.video_info = dict( - fps=video.fps, - name=os.path.basename(inputs), - writer=None, - width=video.width, - height=video.height, - predictions=[]) - inputs = video - else: - raise ValueError(f'Pose 3d inferencer expects input to be a ' - f'video path, but received {inputs}.') - - return inputs - def preprocess_single(self, input: InputType, index: int, @@ -206,6 +175,18 @@ def preprocess_single(self, (ds.pred_instances.bboxes[..., 2:] - ds.pred_instances.bboxes[..., :2]).prod(-1), 'areas') + if not self._video_input: + height, width = results_pose2d[0].metainfo['ori_shape'] + + # Clear the buffer if inputs are individual images to prevent + # carryover effects from previous images + self._buffer.clear() + + else: + height = self.video_info['height'] + width = self.video_info['width'] + img_path = results_pose2d[0].metainfo['img_path'] + # instance matching if use_oks_tracking: _track = partial(_track_by_oks) @@ -241,7 +222,8 @@ def preprocess_single(self, # extract and pad input pose2d sequence 
pose_results_2d = self._pose_seq_extractor( - self._buffer['pose_est_results_list'], frame_idx=index) + self._buffer['pose_est_results_list'], + frame_idx=index if self._video_input else 0) causal = self.cfg.test_dataloader.dataset.get('causal', False) target_idx = -1 if causal else len(pose_results_2d) // 2 @@ -292,11 +274,13 @@ def preprocess_single(self, data_info['lifting_target'] = np.zeros((K, 3), dtype=np.float32) data_info['lifting_target_visible'] = np.ones((K, 1), dtype=np.float32) - data_info['camera_param'] = dict( - w=self.video_info['width'], h=self.video_info['height']) + data_info['camera_param'] = dict(w=width, h=height) data_info.update(self.model.dataset_meta) - data_list.append(self.pipeline(data_info)) + data_info = self.pipeline(data_info) + data_info['data_samples'].set_field( + img_path, 'img_path', field_type='metainfo') + data_list.append(data_info) return data_list @@ -520,7 +504,10 @@ def visualize(self, self.video_info['writer'].write(out_img) else: - assert False + img_name = os.path.basename(pred.metainfo['img_path']) + file_name = file_name if file_name else img_name + out_file = join_path(dir_name, file_name) + mmcv.imwrite(out_img, out_file) if return_vis: return results diff --git a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py index 356b38dddc..09c31d3d64 100644 --- a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py +++ b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py @@ -11,6 +11,7 @@ import torch from mmpose.apis.inferencers import Pose2DInferencer, Pose3DInferencer +from mmpose.structures import PoseDataSample class TestPose3DInferencer(TestCase): @@ -95,18 +96,40 @@ def test_call(self): # `inputs` is path to an image inputs = img_path - with self.assertRaises(ValueError): - results = next(inferencer(inputs, return_vis=True)) + results1 = next(inferencer(inputs, return_vis=True)) + self.assertIn('visualization', results1) + 
self.assertIn('predictions', results1) + self.assertIn('keypoints', results1['predictions'][0][0]) + self.assertEqual(len(results1['predictions'][0][0]['keypoints']), 17) # `inputs` is an image array inputs = img - with self.assertRaises(ValueError): - results = next(inferencer(inputs)) + results2 = next(inferencer(inputs)) + self.assertEqual( + len(results1['predictions'][0]), len(results2['predictions'][0])) + self.assertSequenceEqual(results1['predictions'][0][0]['keypoints'], + results2['predictions'][0][0]['keypoints']) + results2 = next(inferencer(inputs, return_datasample=True)) + self.assertIsInstance(results2['predictions'][0], PoseDataSample) # `inputs` is path to a directory inputs = osp.dirname(img_path) - with self.assertRaises(ValueError): - results = next(inferencer(inputs)) + + with TemporaryDirectory() as tmp_dir: + # only save visualizations + for res in inferencer(inputs, vis_out_dir=tmp_dir): + pass + self.assertEqual(len(os.listdir(tmp_dir)), 4) + # save both visualizations and predictions + results3 = defaultdict(list) + for res in inferencer(inputs, out_dir=tmp_dir): + for key in res: + results3[key].extend(res[key]) + self.assertEqual(len(os.listdir(f'{tmp_dir}/visualizations')), 4) + self.assertEqual(len(os.listdir(f'{tmp_dir}/predictions')), 4) + self.assertEqual(len(results3['predictions']), 4) + self.assertSequenceEqual(results1['predictions'][0][0]['keypoints'], + results3['predictions'][3][0]['keypoints']) # `inputs` is path to a video inputs = 'https://user-images.githubusercontent.com/87690686/' \ From 18b41877c504df523c61d77e605e009532c5ab36 Mon Sep 17 00:00:00 2001 From: Yifan Lareina WU Date: Fri, 16 Jun 2023 11:33:10 +0800 Subject: [PATCH 29/52] [Enhance] Add 3d human pose demo for image (#2459) --- demo/body3d_pose_lifter_demo.py | 336 ++++++++++--------- demo/docs/en/3d_human_pose_demo.md | 9 +- mmpose/apis/inferencers/pose3d_inferencer.py | 9 +- mmpose/visualization/local_visualizer_3d.py | 125 ++++--- 4 files changed, 249 
insertions(+), 230 deletions(-) diff --git a/demo/body3d_pose_lifter_demo.py b/demo/body3d_pose_lifter_demo.py index f3f8eb58d2..0a29973501 100644 --- a/demo/body3d_pose_lifter_demo.py +++ b/demo/body3d_pose_lifter_demo.py @@ -144,6 +144,141 @@ def get_area(results): return results +def get_pose_est_results(args, pose_estimator, frame, bboxes, + pose_est_results_last, next_id, pose_lift_dataset): + pose_det_dataset = pose_estimator.cfg.test_dataloader.dataset + + # make person results for current image + pose_est_results = inference_topdown(pose_estimator, frame, bboxes) + + pose_est_results = get_area(pose_est_results) + if args.use_oks_tracking: + _track = partial(_track_by_oks) + else: + _track = _track_by_iou + + for i, result in enumerate(pose_est_results): + track_id, pose_est_results_last, match_result = _track( + result, pose_est_results_last, args.tracking_thr) + if track_id == -1: + pred_instances = result.pred_instances.cpu().numpy() + keypoints = pred_instances.keypoints + if np.count_nonzero(keypoints[:, :, 1]) >= 3: + pose_est_results[i].set_field(next_id, 'track_id') + next_id += 1 + else: + # If the number of keypoints detected is small, + # delete that person instance. 
+ keypoints[:, :, 1] = -10 + pose_est_results[i].pred_instances.set_field( + keypoints, 'keypoints') + bboxes = pred_instances.bboxes * 0 + pose_est_results[i].pred_instances.set_field(bboxes, 'bboxes') + pose_est_results[i].set_field(-1, 'track_id') + pose_est_results[i].set_field(pred_instances, 'pred_instances') + else: + pose_est_results[i].set_field(track_id, 'track_id') + + del match_result + + pose_est_results_converted = [] + for pose_est_result in pose_est_results: + pose_est_result_converted = PoseDataSample() + gt_instances = InstanceData() + pred_instances = InstanceData() + for k in pose_est_result.gt_instances.keys(): + gt_instances.set_field(pose_est_result.gt_instances[k], k) + for k in pose_est_result.pred_instances.keys(): + pred_instances.set_field(pose_est_result.pred_instances[k], k) + pose_est_result_converted.gt_instances = gt_instances + pose_est_result_converted.pred_instances = pred_instances + pose_est_result_converted.track_id = pose_est_result.track_id + + keypoints = convert_keypoint_definition(pred_instances.keypoints, + pose_det_dataset['type'], + pose_lift_dataset['type']) + pose_est_result_converted.pred_instances.keypoints = keypoints + pose_est_results_converted.append(pose_est_result_converted) + return pose_est_results, pose_est_results_converted, next_id + + +def get_pose_lift_results(args, visualizer, pose_lifter, pose_est_results_list, + frame, frame_idx, pose_est_results): + pose_lift_dataset = pose_lifter.cfg.test_dataloader.dataset + # extract and pad input pose2d sequence + pose_seq_2d = extract_pose_sequence( + pose_est_results_list, + frame_idx=frame_idx, + causal=pose_lift_dataset.get('causal', False), + seq_len=pose_lift_dataset.get('seq_len', 1), + step=pose_lift_dataset.get('seq_step', 1)) + + # 2D-to-3D pose lifting + width, height = frame.shape[:2] + pose_lift_results = inference_pose_lifter_model( + pose_lifter, + pose_seq_2d, + image_size=(width, height), + norm_pose_2d=args.norm_pose_2d) + + # Pose processing 
+ for idx, pose_lift_res in enumerate(pose_lift_results): + pose_lift_res.track_id = pose_est_results[idx].get('track_id', 1e4) + + pred_instances = pose_lift_res.pred_instances + keypoints = pred_instances.keypoints + # print(keypoints) + keypoint_scores = pred_instances.keypoint_scores + if keypoint_scores.ndim == 3: + keypoint_scores = np.squeeze(keypoint_scores, axis=1) + pose_lift_results[ + idx].pred_instances.keypoint_scores = keypoint_scores + if keypoints.ndim == 4: + keypoints = np.squeeze(keypoints, axis=1) + + keypoints = keypoints[..., [0, 2, 1]] + keypoints[..., 0] = -keypoints[..., 0] + keypoints[..., 2] = -keypoints[..., 2] + + # rebase height (z-axis) + if args.rebase_keypoint_height: + keypoints[..., 2] -= np.min( + keypoints[..., 2], axis=-1, keepdims=True) + + pose_lift_results[idx].pred_instances.keypoints = keypoints + + pose_lift_results = sorted( + pose_lift_results, key=lambda x: x.get('track_id', 1e4)) + + pred_3d_data_samples = merge_data_samples(pose_lift_results) + det_data_sample = merge_data_samples(pose_est_results) + + # Visualization + if visualizer is not None: + visualizer.add_datasample( + 'result', + frame, + data_sample=pred_3d_data_samples, + det_data_sample=det_data_sample, + draw_gt=False, + show=args.show, + draw_bbox=True, + kpt_thr=args.kpt_thr, + wait_time=args.show_interval) + + return pred_3d_data_samples.get('pred_instances', None) + + +def get_bbox(args, detector, frame): + det_result = inference_detector(detector, frame) + pred_instance = det_result.pred_instances.cpu().numpy() + + bboxes = pred_instance.bboxes + bboxes = bboxes[np.logical_and(pred_instance.labels == args.det_cat_id, + pred_instance.scores > args.bbox_thr)] + return bboxes + + def main(): assert has_mmdet, 'Please install mmdet to run the demo.' 
@@ -178,8 +313,6 @@ def main(): indices = pose_estimator.cfg.test_dataloader.dataset[ 'frame_indices_test'] - pose_det_dataset = pose_estimator.cfg.test_dataloader.dataset - pose_lifter = init_model( args.pose_lifter_config, args.pose_lifter_checkpoint, @@ -192,10 +325,13 @@ def main(): pose_lifter.cfg.visualizer.radius = args.radius pose_lifter.cfg.visualizer.line_width = args.thickness - local_visualizer = VISUALIZERS.build(pose_lifter.cfg.visualizer) + pose_lifter.cfg.visualizer.det_kpt_color = det_kpt_color + pose_lifter.cfg.visualizer.det_dataset_skeleton = det_dataset_skeleton + pose_lifter.cfg.visualizer.det_dataset_link_color = det_dataset_link_color + visualizer = VISUALIZERS.build(pose_lifter.cfg.visualizer) # the dataset_meta is loaded from the checkpoint - local_visualizer.set_dataset_meta(pose_lifter.dataset_meta) + visualizer.set_dataset_meta(pose_lifter.dataset_meta) if args.input == 'webcam': input_type = 'webcam' @@ -203,28 +339,49 @@ def main(): input_type = mimetypes.guess_type(args.input)[0].split('/')[0] if args.output_root == '': - save_out_video = False + save_output = False else: mmengine.mkdir_or_exist(args.output_root) output_file = os.path.join(args.output_root, os.path.basename(args.input)) if args.input == 'webcam': output_file += '.mp4' - save_out_video = True + save_output = True if args.save_predictions: assert args.output_root != '' args.pred_save_path = f'{args.output_root}/results_' \ f'{os.path.splitext(os.path.basename(args.input))[0]}.json' - if save_out_video: + if save_output: fourcc = cv2.VideoWriter_fourcc(*'mp4v') pose_est_results_list = [] - next_id = 0 - pose_est_results = [] + pred_instances_list = [] + if input_type == 'image': + frame = mmcv.imread(args.input, channel_order='rgb') + + # First stage: 2D pose detection + bboxes = get_bbox(args, detector, frame) + pose_est_results, pose_est_results_converted, _ = get_pose_est_results( + args, pose_estimator, frame, bboxes, [], 0, pose_lift_dataset) + 
pose_est_results_list.append(pose_est_results_converted.copy()) + pred_3d_pred = get_pose_lift_results(args, visualizer, pose_lifter, + pose_est_results_list, frame, 0, + pose_est_results) + + if args.save_predictions: + # save prediction results + pred_instances_list = split_instances(pred_3d_pred) + + if save_output: + frame_vis = visualizer.get_image() + mmcv.imwrite(mmcv.rgb2bgr(frame_vis), output_file) + + elif input_type in ['webcam', 'video']: + next_id = 0 + pose_est_results_converted = [] - if input_type in ['webcam', 'video']: if args.input == 'webcam': video = cv2.VideoCapture(0) else: @@ -233,15 +390,10 @@ def main(): (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.') if int(major_ver) < 3: fps = video.get(cv2.cv.CV_CAP_PROP_FPS) - width = video.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) - height = video.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) else: fps = video.get(cv2.CAP_PROP_FPS) - width = video.get(cv2.CAP_PROP_FRAME_WIDTH) - height = video.get(cv2.CAP_PROP_FRAME_HEIGHT) video_writer = None - pred_instances_list = [] frame_idx = 0 while video.isOpened(): @@ -251,161 +403,36 @@ def main(): if not success: break - pose_est_results_last = pose_est_results + pose_est_results_last = pose_est_results_converted # First stage: 2D pose detection - # test a single image, the resulting box is (x1, y1, x2, y2) - det_result = inference_detector(detector, frame) - pred_instance = det_result.pred_instances.cpu().numpy() - - bboxes = pred_instance.bboxes - bboxes = bboxes[np.logical_and( - pred_instance.labels == args.det_cat_id, - pred_instance.scores > args.bbox_thr)] - if args.use_multi_frames: frames = collect_multi_frames(video, frame_idx, indices, args.online) # make person results for current image - pose_est_results = inference_topdown( - pose_estimator, frames if args.use_multi_frames else frame, - bboxes) - - pose_est_results = get_area(pose_est_results) - if args.use_oks_tracking: - _track = partial(_track_by_oks) - else: - _track = _track_by_iou 
- - for i, result in enumerate(pose_est_results): - track_id, pose_est_results_last, match_result = _track( - result, pose_est_results_last, args.tracking_thr) - if track_id == -1: - pred_instances = result.pred_instances.cpu().numpy() - keypoints = pred_instances.keypoints - if np.count_nonzero(keypoints[:, :, 1]) >= 3: - pose_est_results[i].set_field(next_id, 'track_id') - next_id += 1 - else: - # If the number of keypoints detected is small, - # delete that person instance. - keypoints[:, :, 1] = -10 - pose_est_results[i].pred_instances.set_field( - keypoints, 'keypoints') - bboxes = pred_instances.bboxes * 0 - pose_est_results[i].pred_instances.set_field( - bboxes, 'bboxes') - pose_est_results[i].set_field(-1, 'track_id') - pose_est_results[i].set_field(pred_instances, - 'pred_instances') - else: - pose_est_results[i].set_field(track_id, 'track_id') - - del match_result - - pose_est_results_converted = [] - for pose_est_result in pose_est_results: - pose_est_result_converted = PoseDataSample() - gt_instances = InstanceData() - pred_instances = InstanceData() - for k in pose_est_result.gt_instances.keys(): - gt_instances.set_field(pose_est_result.gt_instances[k], k) - for k in pose_est_result.pred_instances.keys(): - pred_instances.set_field(pose_est_result.pred_instances[k], - k) - pose_est_result_converted.gt_instances = gt_instances - pose_est_result_converted.pred_instances = pred_instances - pose_est_result_converted.track_id = pose_est_result.track_id - - keypoints = convert_keypoint_definition( - pred_instances.keypoints, pose_det_dataset['type'], - pose_lift_dataset['type']) - pose_est_result_converted.pred_instances.keypoints = keypoints - pose_est_results_converted.append(pose_est_result_converted) - + bboxes = get_bbox(args, detector, frame) + pose_est_results, pose_est_results_converted, next_id = get_pose_est_results( # noqa: E501 + args, pose_estimator, + frames if args.use_multi_frames else frame, bboxes, + pose_est_results_last, next_id, 
pose_lift_dataset) pose_est_results_list.append(pose_est_results_converted.copy()) - # extract and pad input pose2d sequence - pose_results_2d = extract_pose_sequence( - pose_est_results_list, - frame_idx=frame_idx, - causal=pose_lift_dataset.get('causal', False), - seq_len=pose_lift_dataset.get('seq_len', 1), - step=pose_lift_dataset.get('seq_step', 1)) - # Second stage: Pose lifting - # 2D-to-3D pose lifting - pose_lift_results = inference_pose_lifter_model( - pose_lifter, - pose_results_2d, - image_size=(width, height), - norm_pose_2d=args.norm_pose_2d) - - # Pose processing - for idx, pose_lift_res in enumerate(pose_lift_results): - gt_instances = pose_lift_res.gt_instances - - pose_lift_res.track_id = pose_est_results_converted[idx].get( - 'track_id', 1e4) - - pred_instances = pose_lift_res.pred_instances - keypoints = pred_instances.keypoints - keypoint_scores = pred_instances.keypoint_scores - if keypoint_scores.ndim == 3: - keypoint_scores = np.squeeze(keypoint_scores, axis=1) - pose_lift_results[ - idx].pred_instances.keypoint_scores = keypoint_scores - if keypoints.ndim == 4: - keypoints = np.squeeze(keypoints, axis=1) - - keypoints = keypoints[..., [0, 2, 1]] - keypoints[..., 0] = -keypoints[..., 0] - keypoints[..., 2] = -keypoints[..., 2] - - # rebase height (z-axis) - if args.rebase_keypoint_height: - keypoints[..., 2] -= np.min( - keypoints[..., 2], axis=-1, keepdims=True) - - pose_lift_results[idx].pred_instances.keypoints = keypoints - - pose_lift_results = sorted( - pose_lift_results, key=lambda x: x.get('track_id', 1e4)) - - pred_3d_data_samples = merge_data_samples(pose_lift_results) - - # Visualization - frame = mmcv.bgr2rgb(frame) - - det_data_sample = merge_data_samples(pose_est_results) - - if local_visualizer is not None: - local_visualizer.add_datasample( - 'result', - frame, - data_sample=pred_3d_data_samples, - det_data_sample=det_data_sample, - draw_gt=False, - det_kpt_color=det_kpt_color, - det_dataset_skeleton=det_dataset_skeleton, - 
det_dataset_link_color=det_dataset_link_color, - show=args.show, - draw_bbox=True, - kpt_thr=args.kpt_thr, - wait_time=args.show_interval) - - frame_vis = local_visualizer.get_image() + pred_3d_pred = get_pose_lift_results(args, visualizer, pose_lifter, + pose_est_results_list, + mmcv.bgr2rgb(frame), + frame_idx, pose_est_results) if args.save_predictions: # save prediction results pred_instances_list.append( dict( frame_id=frame_idx, - instances=split_instances( - pred_3d_data_samples.get('pred_instances', None)))) + instances=split_instances(pred_3d_pred))) - if save_out_video: + if save_output: + frame_vis = visualizer.get_image() if video_writer is None: # the size of the image with visualization may vary # depending on the presence of heatmaps @@ -420,9 +447,6 @@ def main(): break time.sleep(args.show_interval) - if frame_idx == 50: - break - video.release() if video_writer: diff --git a/demo/docs/en/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md index be0c84fdec..3a28c950f0 100644 --- a/demo/docs/en/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -2,7 +2,7 @@
    -### 3D Human Pose Two-stage Estimation Video Demo +### 3D Human Pose Two-stage Estimation Demo #### Using mmdet for human bounding box detection and top-down model for the 1st stage (2D pose detection), and inference the 2nd stage (2D-to-3D lifting) @@ -16,11 +16,12 @@ ${MMPOSE_CONFIG_FILE_2D} \ ${MMPOSE_CHECKPOINT_FILE_2D} \ ${MMPOSE_CONFIG_FILE_3D} \ ${MMPOSE_CHECKPOINT_FILE_3D} \ ---input ${VIDEO_PATH} \ +--input ${VIDEO_PATH or IMAGE_PATH or 'webcam'} \ [--show] \ [--rebase-keypoint-height] \ [--norm-pose-2d] \ [--output-root ${OUT_VIDEO_ROOT}] \ +[--save-predictions] [--save-predictions] \ [--device ${GPU_ID or CPU}] \ [--det-cat-id DET_CAT_ID] \ @@ -44,7 +45,7 @@ Note that Examples: -During 2D pose detection, for single-frame inference that do not rely on extra frames to get the final results of the current frame, try this: +During 2D pose detection, for single-frame inference that do not rely on extra frames to get the final results of the current frame and save the prediction results, try this: ```shell python demo/body3d_pose_lifter_demo.py \ @@ -56,7 +57,7 @@ configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ --input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ --output-root vis_results \ ---rebase-keypoint-height --save-predictions +--rebase-keypoint-height --save-predictions ``` During 2D pose detection, for multi-frame inference that rely on extra frames to get the final results of the current frame, try this: diff --git a/mmpose/apis/inferencers/pose3d_inferencer.py b/mmpose/apis/inferencers/pose3d_inferencer.py index a60bcb9d4b..0fe66ac72b 100644 --- a/mmpose/apis/inferencers/pose3d_inferencer.py +++ b/mmpose/apis/inferencers/pose3d_inferencer.py @@ -447,6 +447,12 @@ def visualize(self, self.visualizer.radius = radius 
self.visualizer.line_width = thickness + det_kpt_color = self.pose2d_model.visualizer.kpt_color + det_dataset_skeleton = self.pose2d_model.visualizer.skeleton + det_dataset_link_color = self.pose2d_model.visualizer.link_color + self.visualizer.det_kpt_color = det_kpt_color + self.visualizer.det_dataset_skeleton = det_dataset_skeleton + self.visualizer.det_dataset_link_color = det_dataset_link_color results = [] @@ -470,9 +476,6 @@ def visualize(self, data_sample=pred, det_data_sample=self._buffer['pose2d_results'], draw_gt=False, - det_kpt_color=self.pose2d_model.visualizer.kpt_color, - det_dataset_skeleton=self.pose2d_model.visualizer.skeleton, - det_dataset_link_color=self.pose2d_model.visualizer.link_color, draw_bbox=draw_bbox, show=show, wait_time=wait_time, diff --git a/mmpose/visualization/local_visualizer_3d.py b/mmpose/visualization/local_visualizer_3d.py index 764a85dee2..569f69c724 100644 --- a/mmpose/visualization/local_visualizer_3d.py +++ b/mmpose/visualization/local_visualizer_3d.py @@ -38,28 +38,41 @@ class Pose3dLocalVisualizer(PoseLocalVisualizer): show_keypoint_weight (bool): Whether to adjust the transparency of keypoints according to their score. Defaults to ``False`` alpha (int, float): The transparency of bboxes. Defaults to ``0.8`` + det_kpt_color (str, tuple(tuple(int)), optional): Keypoints color + info for detection. Defaults to ``None`` + det_dataset_skeleton (list): Skeleton info for detection. Defaults to + ``None`` + det_dataset_link_color (list): Link color for detection. 
Defaults to + ``None`` """ - def __init__(self, - name: str = 'visualizer', - image: Optional[np.ndarray] = None, - vis_backends: Optional[Dict] = None, - save_dir: Optional[str] = None, - bbox_color: Optional[Union[str, Tuple[int]]] = 'green', - kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = 'red', - link_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, - text_color: Optional[Union[str, - Tuple[int]]] = (255, 255, 255), - skeleton: Optional[Union[List, Tuple]] = None, - line_width: Union[int, float] = 1, - radius: Union[int, float] = 3, - show_keypoint_weight: bool = False, - backend: str = 'opencv', - alpha: float = 0.8): + def __init__( + self, + name: str = 'visualizer', + image: Optional[np.ndarray] = None, + vis_backends: Optional[Dict] = None, + save_dir: Optional[str] = None, + bbox_color: Optional[Union[str, Tuple[int]]] = 'green', + kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = 'red', + link_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, + text_color: Optional[Union[str, Tuple[int]]] = (255, 255, 255), + skeleton: Optional[Union[List, Tuple]] = None, + line_width: Union[int, float] = 1, + radius: Union[int, float] = 3, + show_keypoint_weight: bool = False, + backend: str = 'opencv', + alpha: float = 0.8, + det_kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, + det_dataset_skeleton: Optional[Union[str, + Tuple[Tuple[int]]]] = None, + det_dataset_link_color: Optional[np.ndarray] = None): super().__init__(name, image, vis_backends, save_dir, bbox_color, kpt_color, link_color, text_color, skeleton, line_width, radius, show_keypoint_weight, backend, alpha) + self.det_kpt_color = det_kpt_color + self.det_dataset_skeleton = det_dataset_skeleton + self.det_dataset_link_color = det_dataset_link_color def _draw_3d_data_samples( self, @@ -260,16 +273,12 @@ def _draw_3d_instances_kpts(keypoints, return pred_img_data - def _draw_instances_kpts( - self, - image: np.ndarray, - instances: InstanceData, - kpt_thr: float = 0.3, - 
show_kpt_idx: bool = False, - skeleton_style: str = 'mmpose', - det_kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, - det_dataset_skeleton: Optional[List] = None, - det_dataset_link_color: Optional[np.ndarray] = None): + def _draw_instances_kpts(self, + image: np.ndarray, + instances: InstanceData, + kpt_thr: float = 0.3, + show_kpt_idx: bool = False, + skeleton_style: str = 'mmpose'): """Draw keypoints and skeletons (optional) of GT or prediction. Args: @@ -282,12 +291,6 @@ def _draw_instances_kpts( Defaults to ``False`` skeleton_style (str): Skeleton style selection. Defaults to ``'mmpose'`` - det_kpt_color (str, tuple(tuple(int)), optional): Keypoints - color info for detection. Defaults to ``None`` - det_dataset_skeleton (list): Skeleton info for detection. Defaults - to ``None`` - det_dataset_link_color (list): Link color for detection. Defaults - to ``None`` Returns: np.ndarray: the drawn image which channel is RGB. @@ -338,8 +341,8 @@ def _draw_instances_kpts( ..., :2], keypoints_info[..., 2], keypoints_info[..., 3] kpt_color = self.kpt_color - if det_kpt_color is not None: - kpt_color = det_kpt_color + if self.det_kpt_color is not None: + kpt_color = self.det_kpt_color for kpts, score, visible in zip(keypoints, scores, keypoints_visible): @@ -385,11 +388,11 @@ def _draw_instances_kpts( # draw links skeleton = self.skeleton - if det_dataset_skeleton is not None: - skeleton = det_dataset_skeleton + if self.det_dataset_skeleton is not None: + skeleton = self.det_dataset_skeleton link_color = self.link_color - if det_dataset_link_color is not None: - link_color = det_dataset_link_color + if self.det_dataset_link_color is not None: + link_color = self.det_dataset_link_color if skeleton is not None and link_color is not None: if link_color is None or isinstance(link_color, str): link_color = [link_color] * len(skeleton) @@ -450,27 +453,22 @@ def _draw_instances_kpts( return self.get_image() @master_only - def add_datasample( - self, - name: str, - 
image: np.ndarray, - data_sample: PoseDataSample, - det_data_sample: Optional[PoseDataSample] = None, - draw_gt: bool = True, - draw_pred: bool = True, - draw_2d: bool = True, - det_kpt_color: Optional[Union[str, Tuple[Tuple[int]]]] = None, - det_dataset_skeleton: Optional[Union[str, - Tuple[Tuple[int]]]] = None, - det_dataset_link_color: Optional[np.ndarray] = None, - draw_bbox: bool = False, - show_kpt_idx: bool = False, - skeleton_style: str = 'mmpose', - show: bool = False, - wait_time: float = 0, - out_file: Optional[str] = None, - kpt_thr: float = 0.3, - step: int = 0) -> None: + def add_datasample(self, + name: str, + image: np.ndarray, + data_sample: PoseDataSample, + det_data_sample: Optional[PoseDataSample] = None, + draw_gt: bool = True, + draw_pred: bool = True, + draw_2d: bool = True, + draw_bbox: bool = False, + show_kpt_idx: bool = False, + skeleton_style: str = 'mmpose', + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + kpt_thr: float = 0.3, + step: int = 0) -> None: """Draw datasample and save to all backends. - If GT and prediction are plotted at the same time, they are @@ -495,12 +493,6 @@ def add_datasample( Defaults to ``True`` draw_2d (bool): Whether to draw 2d detection results. Defaults to ``True`` - det_kpt_color (str, tuple(tuple(int)), optional): Keypoints color - info for detection. Defaults to ``None`` - det_dataset_skeleton (np.ndarray, optional): The skeleton link info - for detection data. Default to ``None`` - det_dataset_link_color (str, tuple(tuple(int)), optional): Link - color for detection. Defaults to ``None`` draw_bbox (bool): Whether to draw bounding boxes. Default to ``False`` show_kpt_idx (bool): Whether to show the index of keypoints. 
@@ -526,8 +518,7 @@ def add_datasample( if 'pred_instances' in det_data_sample: det_img_data = self._draw_instances_kpts( det_img_data, det_data_sample.pred_instances, kpt_thr, - show_kpt_idx, skeleton_style, det_kpt_color, - det_dataset_skeleton, det_dataset_link_color) + show_kpt_idx, skeleton_style) if draw_bbox: det_img_data = self._draw_instances_bbox( det_img_data, det_data_sample.pred_instances) From 0c02149b6fe5766282715a2b2f0429ec24ef498f Mon Sep 17 00:00:00 2001 From: Tau Date: Fri, 16 Jun 2023 11:33:30 +0800 Subject: [PATCH 30/52] [Docs] Update docs and readme (#2461) --- README.md | 21 +++++++++++---------- README_CN.md | 21 +++++++++++---------- docs/src/papers/datasets/human_art.md | 16 ++++++++++++++++ docs/src/papers/datasets/lapa.md | 18 ++++++++++++++++++ 4 files changed, 56 insertions(+), 20 deletions(-) create mode 100644 docs/src/papers/datasets/human_art.md create mode 100644 docs/src/papers/datasets/lapa.md diff --git a/README.md b/README.md index 2b78649703..b25013ded5 100644 --- a/README.md +++ b/README.md @@ -214,13 +214,13 @@ A summary can be found in the [Model Zoo](https://mmpose.readthedocs.io/en/lates - [x] [DeepPose](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#deeppose-cvpr-2014) (CVPR'2014) - [x] [CPM](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#cpm-cvpr-2016) (CVPR'2016) - [x] [Hourglass](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#hourglass-eccv-2016) (ECCV'2016) -- [ ] [SimpleBaseline3D](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#simplebaseline3d-iccv-2017) (ICCV'2017) +- [x] [SimpleBaseline3D](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#simplebaseline3d-iccv-2017) (ICCV'2017) - [ ] [Associative Embedding](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#associative-embedding-nips-2017) (NeurIPS'2017) - [x] 
[SimpleBaseline2D](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#simplebaseline2d-eccv-2018) (ECCV'2018) - [x] [DSNT](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#dsnt-2018) (ArXiv'2021) - [x] [HRNet](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#hrnet-cvpr-2019) (CVPR'2019) - [x] [IPR](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#ipr-eccv-2018) (ECCV'2018) -- [ ] [VideoPose3D](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#videopose3d-cvpr-2019) (CVPR'2019) +- [x] [VideoPose3D](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/algorithms.html#videopose3d-cvpr-2019) (CVPR'2019) - [x] [HRNetv2](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#hrnetv2-tpami-2019) (TPAMI'2019) - [x] [MSPN](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#mspn-arxiv-2019) (ArXiv'2019) - [x] [SCNet](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/backbones.html#scnet-cvpr-2020) (CVPR'2020) @@ -238,14 +238,14 @@ A summary can be found in the [Model Zoo](https://mmpose.readthedocs.io/en/lates
    Supported techniques: -- [ ] [FPN](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#fpn-cvpr-2017) (CVPR'2017) -- [ ] [FP16](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#fp16-arxiv-2017) (ArXiv'2017) -- [ ] [Wingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#wingloss-cvpr-2018) (CVPR'2018) -- [ ] [AdaptiveWingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#adaptivewingloss-iccv-2019) (ICCV'2019) +- [x] [FPN](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#fpn-cvpr-2017) (CVPR'2017) +- [x] [FP16](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#fp16-arxiv-2017) (ArXiv'2017) +- [x] [Wingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#wingloss-cvpr-2018) (CVPR'2018) +- [x] [AdaptiveWingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#adaptivewingloss-iccv-2019) (ICCV'2019) - [x] [DarkPose](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#darkpose-cvpr-2020) (CVPR'2020) - [x] [UDP](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#udp-cvpr-2020) (CVPR'2020) -- [ ] [Albumentations](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#albumentations-information-2020) (Information'2020) -- [ ] [SoftWingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#softwingloss-tip-2021) (TIP'2021) +- [x] [Albumentations](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#albumentations-information-2020) (Information'2020) +- [x] [SoftWingloss](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#softwingloss-tip-2021) (TIP'2021) - [x] [RLE](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/techniques.html#rle-iccv-2021) (ICCV'2021)
    @@ -284,7 +284,8 @@ A summary can be found in the [Model Zoo](https://mmpose.readthedocs.io/en/lates - [x] [InterHand2.6M](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#interhand2-6m-eccv-2020) \[[homepage](https://mks0601.github.io/InterHand2.6M/)\] (ECCV'2020) - [x] [AP-10K](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#ap-10k-neurips-2021) \[[homepage](https://github.com/AlexTheBad/AP-10K)\] (NeurIPS'2021) - [x] [Horse-10](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#horse-10-wacv-2021) \[[homepage](http://www.mackenziemathislab.org/horse10)\] (WACV'2021) -- [x] [Human-Art](#todo) \[[homepage](https://idea-research.github.io/HumanArt/)\] (CVPR'2023) +- [x] [Human-Art](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#human-art-cvpr-2023) \[[homepage](https://idea-research.github.io/HumanArt/)\] (CVPR'2023) +- [x] [LaPa](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#lapa-aaai-2020) \[[homepage](https://github.com/JDAI-CV/lapa-dataset)\] (AAAI'2020)
    @@ -310,7 +311,7 @@ A summary can be found in the [Model Zoo](https://mmpose.readthedocs.io/en/lates ### Model Request -We will keep up with the latest progress of the community, and support more popular algorithms and frameworks. If you have any feature requests, please feel free to leave a comment in [MMPose Roadmap](https://github.com/open-mmlab/mmpose/issues/9). +We will keep up with the latest progress of the community, and support more popular algorithms and frameworks. If you have any feature requests, please feel free to leave a comment in [MMPose Roadmap](https://github.com/open-mmlab/mmpose/issues/2258). ## Contributing diff --git a/README_CN.md b/README_CN.md index f74b3adb76..4948b7848a 100644 --- a/README_CN.md +++ b/README_CN.md @@ -212,13 +212,13 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的 - [x] [DeepPose](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#deeppose-cvpr-2014) (CVPR'2014) - [x] [CPM](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#cpm-cvpr-2016) (CVPR'2016) - [x] [Hourglass](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#hourglass-eccv-2016) (ECCV'2016) -- [ ] [SimpleBaseline3D](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#simplebaseline3d-iccv-2017) (ICCV'2017) +- [x] [SimpleBaseline3D](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#simplebaseline3d-iccv-2017) (ICCV'2017) - [ ] [Associative Embedding](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#associative-embedding-nips-2017) (NeurIPS'2017) - [x] [SimpleBaseline2D](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#simplebaseline2d-eccv-2018) (ECCV'2018) - [x] [DSNT](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#dsnt-2018) (ArXiv'2021) - [x] [HRNet](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#hrnet-cvpr-2019) (CVPR'2019) - [x] 
[IPR](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#ipr-eccv-2018) (ECCV'2018) -- [ ] [VideoPose3D](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#videopose3d-cvpr-2019) (CVPR'2019) +- [x] [VideoPose3D](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/algorithms.html#videopose3d-cvpr-2019) (CVPR'2019) - [x] [HRNetv2](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#hrnetv2-tpami-2019) (TPAMI'2019) - [x] [MSPN](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#mspn-arxiv-2019) (ArXiv'2019) - [x] [SCNet](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/backbones.html#scnet-cvpr-2020) (CVPR'2020) @@ -236,14 +236,14 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的
    支持的技术 -- [ ] [FPN](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#fpn-cvpr-2017) (CVPR'2017) -- [ ] [FP16](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#fp16-arxiv-2017) (ArXiv'2017) -- [ ] [Wingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#wingloss-cvpr-2018) (CVPR'2018) -- [ ] [AdaptiveWingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#adaptivewingloss-iccv-2019) (ICCV'2019) +- [x] [FPN](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#fpn-cvpr-2017) (CVPR'2017) +- [x] [FP16](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#fp16-arxiv-2017) (ArXiv'2017) +- [x] [Wingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#wingloss-cvpr-2018) (CVPR'2018) +- [x] [AdaptiveWingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#adaptivewingloss-iccv-2019) (ICCV'2019) - [x] [DarkPose](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#darkpose-cvpr-2020) (CVPR'2020) - [x] [UDP](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#udp-cvpr-2020) (CVPR'2020) -- [ ] [Albumentations](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#albumentations-information-2020) (Information'2020) -- [ ] [SoftWingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#softwingloss-tip-2021) (TIP'2021) +- [x] [Albumentations](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#albumentations-information-2020) (Information'2020) +- [x] [SoftWingloss](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#softwingloss-tip-2021) (TIP'2021) - [x] [RLE](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/techniques.html#rle-iccv-2021) (ICCV'2021)
    @@ -282,7 +282,8 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的 - [x] [InterHand2.6M](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#interhand2-6m-eccv-2020) \[[主页](https://mks0601.github.io/InterHand2.6M/)\] (ECCV'2020) - [x] [AP-10K](https://mmpose.readthedocs.io/en/latest/model_zoo_papers/datasets.html#ap-10k-neurips-2021) \[[主页](https://github.com/AlexTheBad/AP-10K)\] (NeurIPS'2021) - [x] [Horse-10](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#horse-10-wacv-2021) \[[主页](http://www.mackenziemathislab.org/horse10)\] (WACV'2021) -- [x] [Human-Art](#todo) \[[homepage](https://idea-research.github.io/HumanArt/)\] (CVPR'2023) +- [x] [Human-Art](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#human-art-cvpr-2023) \[[主页](https://idea-research.github.io/HumanArt/)\] (CVPR'2023) +- [x] [LaPa](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo_papers/datasets.html#lapa-aaai-2020) \[[主页](https://github.com/JDAI-CV/lapa-dataset)\] (AAAI'2020)
    @@ -308,7 +309,7 @@ MMPose v1.0.0 是一个重大更新,包括了大量的 API 和配置文件的 ### 模型需求 -我们将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMPose 有任何功能需求,请随时在 [MMPose Roadmap](https://github.com/open-mmlab/mmpose/issues/9) 中留言。 +我们将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMPose 有任何功能需求,请随时在 [MMPose Roadmap](https://github.com/open-mmlab/mmpose/issues/2258) 中留言。 ## 参与贡献 diff --git a/docs/src/papers/datasets/human_art.md b/docs/src/papers/datasets/human_art.md new file mode 100644 index 0000000000..dc39dabbad --- /dev/null +++ b/docs/src/papers/datasets/human_art.md @@ -0,0 +1,16 @@ +# Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes + + + +
    +Human-Art (CVPR'2023)
+
+```bibtex
+@inproceedings{ju2023humanart,
+ title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes},
+ author={Ju, Xuan and Zeng, Ailing and Wang, Jianan and Xu, Qiang and Zhang, Lei},
+ booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
+ year={2023}}
+```
+
+
    diff --git a/docs/src/papers/datasets/lapa.md b/docs/src/papers/datasets/lapa.md new file mode 100644 index 0000000000..f82c50ca22 --- /dev/null +++ b/docs/src/papers/datasets/lapa.md @@ -0,0 +1,18 @@ +# A New Dataset and Boundary-Attention Semantic Segmentation for Face Parsing + + + +
    +LaPa (AAAI'2020) + +```bibtex +@inproceedings{liu2020new, + title={A New Dataset and Boundary-Attention Semantic Segmentation for Face Parsing.}, + author={Liu, Yinglu and Shi, Hailin and Shen, Hao and Si, Yue and Wang, Xiaobo and Mei, Tao}, + booktitle={AAAI}, + pages={11637--11644}, + year={2020} +} +``` + +
    From 6d9d05e6e3dbbd52244c69a3f7013d2b42e1d528 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Fri, 16 Jun 2023 14:14:26 +0800 Subject: [PATCH 31/52] [Docs] Update 3d inferencer doc (#2462) --- demo/docs/en/3d_human_pose_demo.md | 13 +++++----- docs/en/user_guides/inference.md | 37 +++++++++++++++-------------- docs/zh_cn/user_guides/inference.md | 37 +++++++++++++++-------------- 3 files changed, 44 insertions(+), 43 deletions(-) diff --git a/demo/docs/en/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md index 3a28c950f0..d219b8683a 100644 --- a/demo/docs/en/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -78,17 +78,16 @@ https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_ ### 3D Human Pose Demo with Inferencer -The Inferencer provides a convenient interface for inference, allowing customization using model aliases instead of configuration files and checkpoint paths. 3D Inferencer only supports video path or webcam as input currently. Below is an example command: +The Inferencer provides a convenient interface for inference, allowing customization using model aliases instead of configuration files and checkpoint paths. It supports various input formats, including image paths, video paths, image folder paths, and webcams. Below is an example command: ```shell -python demo/inferencer_demo.py {VIDEO_PATH} \ - --pose3d human3d --vis-out-dir vis_results/human3d +python demo/inferencer_demo.py tests/data/coco/000000000785.jpg \ + --pose3d human3d --vis-out-dir vis_results/human3d \ + --rebase-keypoint-height ``` -This command infers the video and saves the visualization results in the `vis_results/human3d` directory. +This command infers the image and saves the visualization results in the `vis_results/human3d` directory. -Image 1 - -The input video can be downloaded from [Google Drive](https://drive.google.com/file/d/10qEKW15P3-F8xOlAMav5se6fUQTShuT0/view?usp=sharing). 
+Image 1 In addition, the Inferencer supports saving predicted poses. For more information, please refer to the [inferencer document](https://mmpose.readthedocs.io/en/latest/user_guides/inference.html#inferencer-a-unified-inference-interface). diff --git a/docs/en/user_guides/inference.md b/docs/en/user_guides/inference.md index c9102132ac..228ba0f5e1 100644 --- a/docs/en/user_guides/inference.md +++ b/docs/en/user_guides/inference.md @@ -224,24 +224,25 @@ The `MMPoseInferencer` offers a variety of arguments for customizing pose estima The inferencer is designed for both visualization and saving predictions. The table below presents the list of arguments available when using the `MMPoseInferencer` for inference, along with their compatibility with 2D and 3D inferencing: -| Argument | Description | 2D | 3D | -| ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | --- | --- | -| `show` | Controls the display of the image or video in a pop-up window. | ✔️ | ✔️ | -| `radius` | Sets the visualization keypoint radius. | ✔️ | ✔️ | -| `thickness` | Determines the link thickness for visualization. | ✔️ | ✔️ | -| `kpt_thr` | Sets the keypoint score threshold. Keypoints with scores exceeding this threshold will be displayed. | ✔️ | ✔️ | -| `draw_bbox` | Decides whether to display the bounding boxes of instances. | ✔️ | ✔️ | -| `draw_heatmap` | Decides if the predicted heatmaps should be drawn. | ✔️ | ❌ | -| `black_background` | Decides whether the estimated poses should be displayed on a black background. | ✔️ | ❌ | -| `skeleton_style` | Sets the skeleton style. Options include 'mmpose' (default) and 'openpose'. | ✔️ | ❌ | -| `use_oks_tracking` | Decides whether to use OKS as a similarity measure in tracking. | ❌ | ✔️ | -| `tracking_thr` | Sets the similarity threshold for tracking. 
| ❌ | ✔️ | -| `norm_pose_2d` | Decides whether to scale the bounding box to the dataset's average bounding box scale and relocate the bounding box to the dataset's average bounding box center. | ❌ | ✔️ | -| `return_vis` | Decides whether to include visualization images in the results. | ✔️ | ✔️ | -| `vis_out_dir` | Defines the folder path to save the visualization images. If unset, the visualization images will not be saved. | ✔️ | ✔️ | -| `return_datasample` | Determines if the prediction should be returned in the `PoseDataSample` format. | ✔️ | ✔️ | -| `pred_out_dir` | Specifies the folder path to save the predictions. If unset, the predictions will not be saved. | ✔️ | ✔️ | -| `out_dir` | If `vis_out_dir` or `pred_out_dir` is unset, these will be set to `f'{out_dir}/visualization'` or `f'{out_dir}/predictions'`, respectively. | ✔️ | ✔️ | +| Argument | Description | 2D | 3D | +| ------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | --- | --- | +| `show` | Controls the display of the image or video in a pop-up window. | ✔️ | ✔️ | +| `radius` | Sets the visualization keypoint radius. | ✔️ | ✔️ | +| `thickness` | Determines the link thickness for visualization. | ✔️ | ✔️ | +| `kpt_thr` | Sets the keypoint score threshold. Keypoints with scores exceeding this threshold will be displayed. | ✔️ | ✔️ | +| `draw_bbox` | Decides whether to display the bounding boxes of instances. | ✔️ | ✔️ | +| `draw_heatmap` | Decides if the predicted heatmaps should be drawn. | ✔️ | ❌ | +| `black_background` | Decides whether the estimated poses should be displayed on a black background. | ✔️ | ❌ | +| `skeleton_style` | Sets the skeleton style. Options include 'mmpose' (default) and 'openpose'. | ✔️ | ❌ | +| `use_oks_tracking` | Decides whether to use OKS as a similarity measure in tracking. 
| ❌ | ✔️ | +| `tracking_thr` | Sets the similarity threshold for tracking. | ❌ | ✔️ | +| `norm_pose_2d` | Decides whether to scale the bounding box to the dataset's average bounding box scale and relocate the bounding box to the dataset's average bounding box center. | ❌ | ✔️ | +| `rebase_keypoint_height` | Decides whether to set the lowest keypoint with height 0. | ❌ | ✔️ | +| `return_vis` | Decides whether to include visualization images in the results. | ✔️ | ✔️ | +| `vis_out_dir` | Defines the folder path to save the visualization images. If unset, the visualization images will not be saved. | ✔️ | ✔️ | +| `return_datasample` | Determines if the prediction should be returned in the `PoseDataSample` format. | ✔️ | ✔️ | +| `pred_out_dir` | Specifies the folder path to save the predictions. If unset, the predictions will not be saved. | ✔️ | ✔️ | +| `out_dir` | If `vis_out_dir` or `pred_out_dir` is unset, these will be set to `f'{out_dir}/visualization'` or `f'{out_dir}/predictions'`, respectively. 
| ✔️ | ✔️ | ### Model Alias diff --git a/docs/zh_cn/user_guides/inference.md b/docs/zh_cn/user_guides/inference.md index 3b764d659c..a15a52841b 100644 --- a/docs/zh_cn/user_guides/inference.md +++ b/docs/zh_cn/user_guides/inference.md @@ -212,24 +212,25 @@ result = next(result_generator) 推理器被设计用于可视化和保存预测。以下表格列出了在使用 `MMPoseInferencer` 进行推断时可用的参数列表,以及它们与 2D 和 3D 推理器的兼容性: -| 参数 | 描述 | 2D | 3D | -| ------------------- | -------------------------------------------------------------------------------------------------------------------------- | --- | --- | -| `show` | 控制是否在弹出窗口中显示图像或视频。 | ✔️ | ✔️ | -| `radius` | 设置可视化关键点的半径。 | ✔️ | ✔️ | -| `thickness` | 确定可视化链接的厚度。 | ✔️ | ✔️ | -| `kpt_thr` | 设置关键点分数阈值。分数超过此阈值的关键点将被显示。 | ✔️ | ✔️ | -| `draw_bbox` | 决定是否显示实例的边界框。 | ✔️ | ✔️ | -| `draw_heatmap` | 决定是否绘制预测的热图。 | ✔️ | ❌ | -| `black_background` | 决定是否在黑色背景上显示预估的姿势。 | ✔️ | ❌ | -| `skeleton_style` | 设置骨架样式。可选项包括 'mmpose'(默认)和 'openpose'。 | ✔️ | ❌ | -| `use_oks_tracking` | 决定是否在追踪中使用OKS作为相似度测量。 | ❌ | ✔️ | -| `tracking_thr` | 设置追踪的相似度阈值。 | ❌ | ✔️ | -| `norm_pose_2d` | 决定是否将边界框缩放至数据集的平均边界框尺寸,并将边界框移至数据集的平均边界框中心。 | ❌ | ✔️ | -| `return_vis` | 决定是否在结果中包含可视化图像。 | ✔️ | ✔️ | -| `vis_out_dir` | 定义保存可视化图像的文件夹路径。如果未设置,将不保存可视化图像。 | ✔️ | ✔️ | -| `return_datasample` | 决定是否以 `PoseDataSample` 格式返回预测。 | ✔️ | ✔️ | -| `pred_out_dir` | 指定保存预测的文件夹路径。如果未设置,将不保存预测。 | ✔️ | ✔️ | -| `out_dir` | 如果 `vis_out_dir` 或 `pred_out_dir` 未设置,它们将分别设置为 `f'{out_dir}/visualization'` 或 `f'{out_dir}/predictions'`。 | ✔️ | ✔️ | +| 参数 | 描述 | 2D | 3D | +| ------------------------ | -------------------------------------------------------------------------------------------------------------------------- | --- | --- | +| `show` | 控制是否在弹出窗口中显示图像或视频。 | ✔️ | ✔️ | +| `radius` | 设置可视化关键点的半径。 | ✔️ | ✔️ | +| `thickness` | 确定可视化链接的厚度。 | ✔️ | ✔️ | +| `kpt_thr` | 设置关键点分数阈值。分数超过此阈值的关键点将被显示。 | ✔️ | ✔️ | +| `draw_bbox` | 决定是否显示实例的边界框。 | ✔️ | ✔️ | +| `draw_heatmap` | 决定是否绘制预测的热图。 | ✔️ | ❌ | +| `black_background` | 决定是否在黑色背景上显示预估的姿势。 | ✔️ | ❌ | 
+| `skeleton_style` | 设置骨架样式。可选项包括 'mmpose'(默认)和 'openpose'。 | ✔️ | ❌ | +| `use_oks_tracking` | 决定是否在追踪中使用OKS作为相似度测量。 | ❌ | ✔️ | +| `tracking_thr` | 设置追踪的相似度阈值。 | ❌ | ✔️ | +| `norm_pose_2d` | 决定是否将边界框缩放至数据集的平均边界框尺寸,并将边界框移至数据集的平均边界框中心。 | ❌ | ✔️ | +| `rebase_keypoint_height` | 决定是否将最低关键点的高度置为 0。 | ❌ | ✔️ | +| `return_vis` | 决定是否在结果中包含可视化图像。 | ✔️ | ✔️ | +| `vis_out_dir` | 定义保存可视化图像的文件夹路径。如果未设置,将不保存可视化图像。 | ✔️ | ✔️ | +| `return_datasample` | 决定是否以 `PoseDataSample` 格式返回预测。 | ✔️ | ✔️ | +| `pred_out_dir` | 指定保存预测的文件夹路径。如果未设置,将不保存预测。 | ✔️ | ✔️ | +| `out_dir` | 如果 `vis_out_dir` 或 `pred_out_dir` 未设置,它们将分别设置为 `f'{out_dir}/visualization'` 或 `f'{out_dir}/predictions'`。 | ✔️ | ✔️ | ### 模型别名 From a912a395cf9fa9293b35d3c14c1208dff5666fef Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 19 Jun 2023 10:11:30 +0800 Subject: [PATCH 32/52] [Feature] Support New config type (#2463) --- README.md | 6 +- README_CN.md | 4 +- demo/MMPose_Tutorial.ipynb | 14 +- mmpose/apis/inference.py | 8 +- .../inferencers/base_mmpose_inferencer.py | 4 +- mmpose/configs/_base_/default_runtime.py | 54 ++++ .../rtmpose_m_8xb256-420e_coco-256x192.py | 251 +++++++++++++++ .../rtmpose_s_8xb256_420e_aic_coco_256x192.py | 292 ++++++++++++++++++ ...m_hrnet-w48_udp-8xb32-210e_coco-256x192.py | 167 ++++++++++ .../datasets/transforms/common_transforms.py | 2 + .../mmpose4aigc/openpose_visualization.py | 4 +- projects/{yolox-pose => yolox_pose}/README.md | 0 .../configs/_base_/datasets | 0 .../configs/_base_/default_runtime.py | 0 .../configs/_base_/py_default_runtime.py | 45 +++ .../py_yolox_pose_s_8xb32_300e_coco.py | 281 +++++++++++++++++ .../configs/yolox-pose_l_4xb64-300e_coco.py | 0 .../configs/yolox-pose_m_4xb64-300e_coco.py | 0 .../configs/yolox-pose_s_8xb32-300e_coco.py | 0 .../yolox-pose_tiny_4xb64-300e_coco.py | 0 .../datasets/__init__.py | 0 .../datasets/bbox_keypoint_structure.py | 0 .../datasets/coco_dataset.py | 0 .../datasets/transforms.py | 0 projects/{yolox-pose => yolox_pose}/demo | 0 
.../models/__init__.py | 0 .../models/assigner.py | 0 .../models/data_preprocessor.py | 0 .../models/oks_loss.py | 0 .../models/utils.py | 0 .../models/yolox_pose_head.py | 0 projects/{yolox-pose => yolox_pose}/tools | 0 setup.cfg | 5 + tools/misc/browse_dataset.py | 4 +- tools/train.py | 5 +- 35 files changed, 1133 insertions(+), 13 deletions(-) create mode 100644 mmpose/configs/_base_/default_runtime.py create mode 100644 mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py create mode 100644 mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py create mode 100644 mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py rename projects/{yolox-pose => yolox_pose}/README.md (100%) rename projects/{yolox-pose => yolox_pose}/configs/_base_/datasets (100%) rename projects/{yolox-pose => yolox_pose}/configs/_base_/default_runtime.py (100%) create mode 100644 projects/yolox_pose/configs/_base_/py_default_runtime.py create mode 100644 projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py rename projects/{yolox-pose => yolox_pose}/configs/yolox-pose_l_4xb64-300e_coco.py (100%) rename projects/{yolox-pose => yolox_pose}/configs/yolox-pose_m_4xb64-300e_coco.py (100%) rename projects/{yolox-pose => yolox_pose}/configs/yolox-pose_s_8xb32-300e_coco.py (100%) rename projects/{yolox-pose => yolox_pose}/configs/yolox-pose_tiny_4xb64-300e_coco.py (100%) rename projects/{yolox-pose => yolox_pose}/datasets/__init__.py (100%) rename projects/{yolox-pose => yolox_pose}/datasets/bbox_keypoint_structure.py (100%) rename projects/{yolox-pose => yolox_pose}/datasets/coco_dataset.py (100%) rename projects/{yolox-pose => yolox_pose}/datasets/transforms.py (100%) rename projects/{yolox-pose => yolox_pose}/demo (100%) rename projects/{yolox-pose => yolox_pose}/models/__init__.py (100%) rename projects/{yolox-pose => yolox_pose}/models/assigner.py (100%) rename 
projects/{yolox-pose => yolox_pose}/models/data_preprocessor.py (100%) rename projects/{yolox-pose => yolox_pose}/models/oks_loss.py (100%) rename projects/{yolox-pose => yolox_pose}/models/utils.py (100%) rename projects/{yolox-pose => yolox_pose}/models/yolox_pose_head.py (100%) rename projects/{yolox-pose => yolox_pose}/tools (100%) diff --git a/README.md b/README.md index b25013ded5..80a975e35a 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,7 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- ## What's New -- We are excited to release **YOLOX-Pose**, a One-Stage multi-person pose estimation model based on YOLOX. Checkout our [project page](/projects/yolox-pose/) for more details. +- We are excited to release **YOLOX-Pose**, a One-Stage multi-person pose estimation model based on YOLOX. Checkout our [project page](/projects/yolox_pose/) for more details. ![yolox-pose_intro](https://user-images.githubusercontent.com/26127467/226655503-3cee746e-6e42-40be-82ae-6e7cae2a4c7e.jpg) @@ -108,7 +108,7 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - Build individual projects with full power of MMPose but not bound up with heavy frameworks - Checkout new projects: - [RTMPose](/projects/rtmpose/) - - [YOLOX-Pose](/projects/yolox-pose/) + - [YOLOX-Pose](/projects/yolox_pose/) - [MMPose4AIGC](/projects/mmpose4aigc/) - Become a contributors and make MMPose greater. 
Start your journey from the [example project](/projects/example_project/) @@ -116,7 +116,7 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - 2022-04-06: MMPose [v1.0.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) is officially released, with the main updates including: - - Release of [YOLOX-Pose](/projects/yolox-pose/), a One-Stage multi-person pose estimation model based on YOLOX + - Release of [YOLOX-Pose](/projects/yolox_pose/), a One-Stage multi-person pose estimation model based on YOLOX - Development of [MMPose for AIGC](/projects/mmpose4aigc/) based on RTMPose, generating high-quality skeleton images for Pose-guided AIGC projects - Support for OpenPose-style skeleton visualization - More complete and user-friendly [documentation and tutorials](https://mmpose.readthedocs.io/en/latest/overview.html) diff --git a/README_CN.md b/README_CN.md index 4948b7848a..2c0c725fa3 100644 --- a/README_CN.md +++ b/README_CN.md @@ -106,7 +106,7 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - 通过独立项目的形式,利用 MMPose 的强大功能,同时不被代码框架所束缚 - 最新添加的项目包括: - [RTMPose](/projects/rtmpose/) - - [YOLOX-Pose](/projects/yolox-pose/) + - [YOLOX-Pose](/projects/yolox_pose/) - [MMPose4AIGC](/projects/mmpose4aigc/) - 从简单的 [示例项目](/projects/example_project/) 开启您的 MMPose 代码贡献者之旅吧,让我们共同打造更好用的 MMPose! 
@@ -114,7 +114,7 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - 2022-04-06:MMPose [v1.0.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) 正式发布了,主要更新包括: - - 发布了 [YOLOX-Pose](/projects/yolox-pose/),一个基于 YOLOX 的 One-Stage 多人姿态估计模型 + - 发布了 [YOLOX-Pose](/projects/yolox_pose/),一个基于 YOLOX 的 One-Stage 多人姿态估计模型 - 基于 RTMPose 开发的 [MMPose for AIGC](/projects/mmpose4aigc/),生成高质量骨架图片用于 Pose-guided AIGC 项目 - 支持 OpenPose 风格的骨架可视化 - 更加完善、友好的 [文档和教程](https://mmpose.readthedocs.io/zh_CN/latest/overview.html) diff --git a/demo/MMPose_Tutorial.ipynb b/demo/MMPose_Tutorial.ipynb index b6d3b6aadb..0e9ff9b57f 100644 --- a/demo/MMPose_Tutorial.ipynb +++ b/demo/MMPose_Tutorial.ipynb @@ -11,6 +11,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "8xX3YewOtqV0" @@ -28,6 +29,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "bkw-kUD8t3t8" @@ -611,6 +613,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "r2bf94XpyFnk" @@ -743,7 +746,9 @@ " \"\"\"Visualize predicted keypoints (and heatmaps) of one image.\"\"\"\n", "\n", " # predict bbox\n", - " init_default_scope(detector.cfg.get('default_scope', 'mmdet'))\n", + " scope = detector.cfg.get('default_scope', 'mmdet')\n", + " if scope is not None:\n", + " init_default_scope(scope)\n", " detect_result = inference_detector(detector, img_path)\n", " pred_instance = detect_result.pred_instances.cpu().numpy()\n", " bboxes = np.concatenate(\n", @@ -854,6 +859,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "42HG6DSNI0Ke" @@ -1089,6 +1095,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "H-dMbjgnJzbH" @@ -1100,6 +1107,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "jCu4npV2rl_Q" @@ -1195,6 +1203,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "UmGitQZkUnom" @@ -1495,6 +1504,7 @@ ] }, { + "attachments": {}, 
"cell_type": "markdown", "metadata": { "id": "UlD8iDZehE2S" @@ -1655,6 +1665,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "ChVqB1oYncmo" @@ -3533,6 +3544,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "sdLwcaojhE2T" diff --git a/mmpose/apis/inference.py b/mmpose/apis/inference.py index 7f733fff45..3f674b3677 100644 --- a/mmpose/apis/inference.py +++ b/mmpose/apis/inference.py @@ -96,7 +96,9 @@ def init_model(config: Union[str, Path, Config], config.model.train_cfg = None # register all modules in mmpose into the registries - init_default_scope(config.get('default_scope', 'mmpose')) + scope = config.get('default_scope', 'mmpose') + if scope is not None: + init_default_scope(scope) model = build_pose_estimator(config.model) model = revert_sync_batchnorm(model) @@ -149,7 +151,9 @@ def inference_topdown(model: nn.Module, ``data_sample.pred_instances.keypoints`` and ``data_sample.pred_instances.keypoint_scores``. """ - init_default_scope(model.cfg.get('default_scope', 'mmpose')) + scope = model.cfg.get('default_scope', 'mmpose') + if scope is not None: + init_default_scope(scope) pipeline = Compose(model.cfg.test_dataloader.dataset.pipeline) if bboxes is None: diff --git a/mmpose/apis/inferencers/base_mmpose_inferencer.py b/mmpose/apis/inferencers/base_mmpose_inferencer.py index 0ea6e9c156..bed28b90d7 100644 --- a/mmpose/apis/inferencers/base_mmpose_inferencer.py +++ b/mmpose/apis/inferencers/base_mmpose_inferencer.py @@ -223,7 +223,9 @@ def _init_pipeline(self, cfg: ConfigType) -> Callable: ``np.ndarray``. The returned pipeline will be used to process a single data. 
""" - init_default_scope(cfg.get('default_scope', 'mmpose')) + scope = cfg.get('default_scope', 'mmpose') + if scope is not None: + init_default_scope(scope) return Compose(cfg.test_dataloader.dataset.pipeline) def update_model_visualizer_settings(self, **kwargs): diff --git a/mmpose/configs/_base_/default_runtime.py b/mmpose/configs/_base_/default_runtime.py new file mode 100644 index 0000000000..349ecf4b17 --- /dev/null +++ b/mmpose/configs/_base_/default_runtime.py @@ -0,0 +1,54 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook, SyncBuffersHook) +from mmengine.runner import LogProcessor +from mmengine.visualization import LocalVisBackend + +from mmpose.engine.hooks import PoseVisualizationHook +from mmpose.visualization import PoseLocalVisualizer + +default_scope = None + +# hooks +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, interval=10), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=PoseVisualizationHook, enable=False), +) + +# custom hooks +custom_hooks = [ + # Synchronize model buffers such as running_mean and running_var in BN + # at the end of each epoch + dict(type=SyncBuffersHook) +] + +# multi-processing backend +env_cfg = dict( + cudnn_benchmark=False, + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + dist_cfg=dict(backend='nccl'), +) + +# visualizer +vis_backends = [dict(type=LocalVisBackend)] +visualizer = dict( + type=PoseLocalVisualizer, vis_backends=vis_backends, name='visualizer') + +# logger +log_processor = dict( + type=LogProcessor, window_size=50, by_epoch=True, num_digits=6) +log_level = 'INFO' +load_from = None +resume = False + +# file I/O backend +backend_args = dict(backend='local') + +# training/validation/testing progress +train_cfg = 
dict(by_epoch=True) +val_cfg = dict() +test_cfg = dict() diff --git a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py new file mode 100644 index 0000000000..759750110c --- /dev/null +++ b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py @@ -0,0 +1,251 @@ +# Copyright (c) OpenMMLab. All rights reserved. +if '_base_': + from mmpose.configs._base_.default_runtime import * + +from albumentations.augmentations import Blur, CoarseDropout, MedianBlur +from mmdet.datasets.transforms import YOLOXHSVRandomAug +from mmdet.engine.hooks import PipelineSwitchHook +from mmdet.models import CSPNeXt +from mmengine.dataset import DefaultSampler +from mmengine.hooks import EMAHook +from mmengine.model import PretrainedInit +from mmengine.optim import CosineAnnealingLR, LinearLR, OptimWrapper +from torch.nn import SiLU, SyncBatchNorm +from torch.optim import AdamW + +from mmpose.codecs import SimCCLabel +from mmpose.datasets import (CocoDataset, GenerateTarget, GetBBoxCenterScale, + LoadImage, PackPoseInputs, RandomFlip, + RandomHalfBody, TopdownAffine) +from mmpose.datasets.transforms.common_transforms import (Albumentation, + RandomBBoxTransform) +from mmpose.engine.hooks import ExpMomentumEMA +from mmpose.evaluation import CocoMetric +from mmpose.models import (KLDiscretLoss, PoseDataPreprocessor, RTMCCHead, + TopdownPoseEstimator) + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg.update(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type=OptimWrapper, + optimizer=dict(type=AdamW, lr=base_lr, weight_decay=0.05), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type=LinearLR, start_factor=1.0e-5, by_epoch=False, begin=0, end=1000), + dict( + # use 
cosine lr from 210 to 420 epoch + type=CosineAnnealingLR, + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type=SimCCLabel, + input_size=(192, 256), + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type=TopdownPoseEstimator, + data_preprocessor=dict( + type=PoseDataPreprocessor, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type=CSPNeXt, + arch='P5', + expand_ratio=0.5, + deepen_factor=0.67, + widen_factor=0.75, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type=SyncBatchNorm), + act_cfg=dict(type=SiLU), + init_cfg=dict( + type=PretrainedInit, + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmpose/cspnext-m_udp-aic-coco_210e-256x192-f2f7d6f6_20230130.pth' # noqa + )), + head=dict( + type=RTMCCHead, + in_channels=768, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type=KLDiscretLoss, + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = CocoDataset +data_mode = 'topdown' +data_root = 'data/coco/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines 
+train_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=RandomFlip, direction='horizontal'), + dict(type=RandomHalfBody), + dict(type=RandomBBoxTransform, scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=YOLOXHSVRandomAug), + dict( + type=Albumentation, + transforms=[ + dict(type=Blur, p=0.1), + dict(type=MedianBlur, p=0.1), + dict( + type=CoarseDropout, + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type=GenerateTarget, encoder=codec), + dict(type=PackPoseInputs) +] +val_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=PackPoseInputs) +] + +train_pipeline_stage2 = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=RandomFlip, direction='horizontal'), + dict(type=RandomHalfBody), + dict( + type=RandomBBoxTransform, + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=YOLOXHSVRandomAug), + dict( + type=Albumentation, + transforms=[ + dict(type=Blur, p=0.1), + dict(type=MedianBlur, p=0.1), + dict( + type=CoarseDropout, + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type=GenerateTarget, encoder=codec), + dict(type=PackPoseInputs) +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + drop_last=True, + sampler=dict(type=DefaultSampler, shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_train2017.json', + data_prefix=dict(img='train2017/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + 
batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type=DefaultSampler, shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_val2017.json', + # bbox_file=f'{data_root}person_detection_results/' + # 'COCO_val2017_detections_AP_H_56_person.json', + data_prefix=dict(img='val2017/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks.update( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type=EMAHook, + ema_type=ExpMomentumEMA, + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type=PipelineSwitchHook, + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type=CocoMetric, + ann_file=data_root + 'annotations/person_keypoints_val2017.json') +test_evaluator = val_evaluator diff --git a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py new file mode 100644 index 0000000000..f49c7a2e23 --- /dev/null +++ b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py @@ -0,0 +1,292 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+if '_base_': + from mmpose.configs._base_.default_runtime import * + +from albumentations.augmentations import Blur, CoarseDropout, MedianBlur +from mmdet.datasets.transforms import YOLOXHSVRandomAug +from mmdet.engine.hooks import PipelineSwitchHook +from mmdet.models import CSPNeXt +from mmengine.dataset import DefaultSampler, RepeatDataset +from mmengine.hooks import EMAHook +from mmengine.model import PretrainedInit +from mmengine.optim import CosineAnnealingLR, LinearLR, OptimWrapper +from torch.nn import SiLU, SyncBatchNorm +from torch.optim import AdamW + +from mmpose.codecs import SimCCLabel +from mmpose.datasets import (AicDataset, CocoDataset, CombinedDataset, + GenerateTarget, GetBBoxCenterScale, + KeypointConverter, LoadImage, PackPoseInputs, + RandomFlip, RandomHalfBody, TopdownAffine) +from mmpose.datasets.transforms.common_transforms import (Albumentation, + RandomBBoxTransform) +from mmpose.engine.hooks import ExpMomentumEMA +from mmpose.evaluation import CocoMetric +from mmpose.models import (KLDiscretLoss, PoseDataPreprocessor, RTMCCHead, + TopdownPoseEstimator) + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg.update(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type=OptimWrapper, + optimizer=dict(type=AdamW, lr=base_lr, weight_decay=0.0), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type=LinearLR, start_factor=1.0e-5, by_epoch=False, begin=0, end=1000), + dict( + # use cosine lr from 210 to 420 epoch + type=CosineAnnealingLR, + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type=SimCCLabel, + input_size=(192, 256), + 
sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type=TopdownPoseEstimator, + data_preprocessor=dict( + type=PoseDataPreprocessor, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type=CSPNeXt, + arch='P5', + expand_ratio=0.5, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type=SyncBatchNorm), + act_cfg=dict(type=SiLU), + init_cfg=dict( + type=PretrainedInit, + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth' # noqa + )), + head=dict( + type=RTMCCHead, + in_channels=512, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type=KLDiscretLoss, + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = CocoDataset +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/', +# f'{data_root}': 's3://openmmlab/datasets/' +# })) + +# pipelines +train_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=RandomFlip, direction='horizontal'), + dict(type=RandomHalfBody), + dict(type=RandomBBoxTransform, scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=YOLOXHSVRandomAug), + dict( + type=Albumentation, + transforms=[ + dict(type=Blur, p=0.1), + 
dict(type=MedianBlur, p=0.1), + dict( + type=CoarseDropout, + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type=GenerateTarget, encoder=codec), + dict(type=PackPoseInputs) +] +val_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=PackPoseInputs) +] + +train_pipeline_stage2 = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=RandomFlip, direction='horizontal'), + dict(type=RandomHalfBody), + dict( + type=RandomBBoxTransform, + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type=TopdownAffine, input_size=codec['input_size']), + dict(type=YOLOXHSVRandomAug), + dict( + type=Albumentation, + transforms=[ + dict(type=Blur, p=0.1), + dict(type=MedianBlur, p=0.1), + dict( + type=CoarseDropout, + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type=GenerateTarget, encoder=codec), + dict(type=PackPoseInputs) +] + +# train datasets +dataset_coco = dict( + type=RepeatDataset, + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/person_keypoints_train2017.json', + data_prefix=dict(img='detection/coco/train2017/'), + pipeline=[], + ), + times=3) + +dataset_aic = dict( + type=AicDataset, + data_root=data_root, + data_mode=data_mode, + ann_file='aic/annotations/aic_train.json', + data_prefix=dict(img='pose/ai_challenge/ai_challenger_keypoint' + '_train_20170902/keypoint_train_images_20170902/'), + pipeline=[ + dict( + type=KeypointConverter, + num_keypoints=17, + mapping=[ + (0, 6), + (1, 8), + (2, 10), + (3, 5), + (4, 7), + (5, 9), + (6, 12), + (7, 14), + (8, 16), + (9, 11), + (10, 13), + (11, 15), + ]) + ], +) + +# data loaders +train_dataloader = dict( + batch_size=128 * 2, + 
num_workers=10, + persistent_workers=True, + sampler=dict(type=DefaultSampler, shuffle=True), + dataset=dict( + type=CombinedDataset, + metainfo=dict(from_file='configs/_base_/datasets/coco.py'), + datasets=[dataset_coco, dataset_aic], + pipeline=train_pipeline, + test_mode=False, + )) +val_dataloader = dict( + batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type=DefaultSampler, shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='coco/annotations/person_keypoints_val2017.json', + # bbox_file='data/coco/person_detection_results/' + # 'COCO_val2017_detections_AP_H_56_person.json', + data_prefix=dict(img='detection/coco/val2017/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks.update( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type=EMAHook, + ema_type=ExpMomentumEMA, + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type=PipelineSwitchHook, + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type=CocoMetric, + ann_file=data_root + 'coco/annotations/person_keypoints_val2017.json') +test_evaluator = val_evaluator diff --git a/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py b/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py new file mode 100644 index 0000000000..9c73e1551a --- /dev/null +++ b/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py @@ -0,0 +1,167 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+if '_base_': + from mmpose.configs._base_.default_runtime import * + +from mmengine.dataset import DefaultSampler +from mmengine.model import PretrainedInit +from mmengine.optim import LinearLR, MultiStepLR +from torch.optim import Adam + +from mmpose.codecs import UDPHeatmap +from mmpose.datasets import (CocoDataset, GenerateTarget, GetBBoxCenterScale, + LoadImage, PackPoseInputs, RandomFlip, + RandomHalfBody, TopdownAffine) +from mmpose.datasets.transforms.common_transforms import RandomBBoxTransform +from mmpose.evaluation import CocoMetric +from mmpose.models import (HeatmapHead, HRNet, KeypointMSELoss, + PoseDataPreprocessor, TopdownPoseEstimator) + +# runtime +train_cfg.update(max_epochs=210, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type=Adam, + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict(type=LinearLR, begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type=MultiStepLR, + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks.update(checkpoint=dict(save_best='coco/AP', rule='greater')) + +# codec settings +codec = dict( + type=UDPHeatmap, input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type=TopdownPoseEstimator, + data_preprocessor=dict( + type=PoseDataPreprocessor, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type=HRNet, + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(48, 96)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(48, 96, 192)), + stage4=dict( + 
num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(48, 96, 192, 384))), + init_cfg=dict( + type=PretrainedInit, + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w48-8ef0771d.pth'), + ), + head=dict( + type=HeatmapHead, + in_channels=48, + out_channels=17, + deconv_out_channels=None, + loss=dict(type=KeypointMSELoss, use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=False, + )) + +# base dataset settings +dataset_type = CocoDataset +data_mode = 'topdown' +data_root = 'data/coco/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=RandomFlip, direction='horizontal'), + dict(type=RandomHalfBody), + dict(type=RandomBBoxTransform), + dict(type=TopdownAffine, input_size=codec['input_size'], use_udp=True), + dict(type=GenerateTarget, encoder=codec), + dict(type=PackPoseInputs) +] +val_pipeline = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=GetBBoxCenterScale), + dict(type=TopdownAffine, input_size=codec['input_size'], use_udp=True), + dict(type=PackPoseInputs) +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type=DefaultSampler, shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_train2017.json', + data_prefix=dict(img='train2017/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type=DefaultSampler, shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_val2017.json', + bbox_file='data/coco/person_detection_results/' + 
'COCO_val2017_detections_AP_H_56_person.json', + data_prefix=dict(img='val2017/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type=CocoMetric, + ann_file=data_root + 'annotations/person_keypoints_val2017.json') +test_evaluator = val_evaluator diff --git a/mmpose/datasets/transforms/common_transforms.py b/mmpose/datasets/transforms/common_transforms.py index 8db0ff37c7..e005a3c390 100644 --- a/mmpose/datasets/transforms/common_transforms.py +++ b/mmpose/datasets/transforms/common_transforms.py @@ -649,6 +649,8 @@ def albu_builder(self, cfg: dict) -> albumentations: f'{obj_type} is not pixel-level transformations. ' 'Please use with caution.') obj_cls = getattr(albumentations, obj_type) + elif isinstance(obj_type, type): + obj_cls = obj_type else: raise TypeError(f'type must be a str, but got {type(obj_type)}') diff --git a/projects/mmpose4aigc/openpose_visualization.py b/projects/mmpose4aigc/openpose_visualization.py index b7fde6eae0..b634d07757 100644 --- a/projects/mmpose4aigc/openpose_visualization.py +++ b/projects/mmpose4aigc/openpose_visualization.py @@ -43,7 +43,9 @@ def mmpose_to_openpose_visualization(args, img_path, detector, pose_estimator): """Visualize predicted keypoints of one image in openpose format.""" # predict bbox - init_default_scope(detector.cfg.get('default_scope', 'mmdet')) + scope = detector.cfg.get('default_scope', 'mmdet') + if scope is not None: + init_default_scope(scope) det_result = inference_detector(detector, img_path) pred_instance = det_result.pred_instances.cpu().numpy() bboxes = np.concatenate( diff --git a/projects/yolox-pose/README.md b/projects/yolox_pose/README.md similarity index 100% rename from projects/yolox-pose/README.md rename to projects/yolox_pose/README.md diff --git a/projects/yolox-pose/configs/_base_/datasets b/projects/yolox_pose/configs/_base_/datasets similarity index 100% rename from projects/yolox-pose/configs/_base_/datasets 
rename to projects/yolox_pose/configs/_base_/datasets diff --git a/projects/yolox-pose/configs/_base_/default_runtime.py b/projects/yolox_pose/configs/_base_/default_runtime.py similarity index 100% rename from projects/yolox-pose/configs/_base_/default_runtime.py rename to projects/yolox_pose/configs/_base_/default_runtime.py diff --git a/projects/yolox_pose/configs/_base_/py_default_runtime.py b/projects/yolox_pose/configs/_base_/py_default_runtime.py new file mode 100644 index 0000000000..354d96ad0d --- /dev/null +++ b/projects/yolox_pose/configs/_base_/py_default_runtime.py @@ -0,0 +1,45 @@ +from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook, + LoggerHook, ParamSchedulerHook) +from mmengine.runner import LogProcessor, TestLoop, ValLoop +from mmengine.visualization import LocalVisBackend + +from mmpose.engine.hooks import PoseVisualizationHook +from mmpose.visualization import PoseLocalVisualizer + +default_scope = None +# hooks +default_hooks = dict( + timer=dict(type=IterTimerHook), + logger=dict(type=LoggerHook, interval=50), + param_scheduler=dict(type=ParamSchedulerHook), + checkpoint=dict(type=CheckpointHook, interval=10, max_keep_ckpts=3), + sampler_seed=dict(type=DistSamplerSeedHook), + visualization=dict(type=PoseVisualizationHook, enable=False), +) + +# multi-processing backend +env_cfg = dict( + cudnn_benchmark=False, + mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0), + dist_cfg=dict(backend='nccl'), +) + +# visualizer +vis_backends = [dict(type=LocalVisBackend)] +visualizer = dict( + type=PoseLocalVisualizer, vis_backends=vis_backends, name='visualizer') + +# logger +log_processor = dict( + type=LogProcessor, window_size=50, by_epoch=True, num_digits=6) +log_level = 'INFO' +load_from = None +resume = False + +# file I/O backend +backend_args = dict(backend='local') + +# training/validation/testing progress +train_cfg = dict() +val_cfg = dict(type=ValLoop) +test_cfg = dict(type=TestLoop) diff --git 
a/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py b/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py new file mode 100644 index 0000000000..2ff6f31463 --- /dev/null +++ b/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py @@ -0,0 +1,281 @@ +if '_base_': + from ._base_.py_default_runtime import * + +from datasets import (CocoDataset, FilterDetPoseAnnotations, PackDetPoseInputs, + PoseToDetConverter) +from mmcv.ops import nms +from mmdet.datasets.transforms import (Pad, RandomAffine, RandomFlip, Resize, + YOLOXHSVRandomAug) +from mmdet.engine.hooks import SyncNormHook +from mmdet.engine.schedulers import QuadraticWarmupLR +from mmdet.models import CrossEntropyLoss, DetDataPreprocessor, IoULoss, L1Loss +from mmdet.models.task_modules import BboxOverlaps2D +from mmengine.dataset import DefaultSampler +from mmengine.hooks import EMAHook +from mmengine.model import PretrainedInit +from mmengine.optim import ConstantLR, CosineAnnealingLR, OptimWrapper +from mmengine.runner import EpochBasedTrainLoop +from mmyolo.datasets.transforms import Mosaic, YOLOXMixUp +from mmyolo.engine.hooks import YOLOXModeSwitchHook +from mmyolo.models import (YOLOXPAFPN, ExpMomentumEMA, YOLODetector, + YOLOXCSPDarknet) +from models import (OksLoss, PoseBatchSyncRandomResize, PoseSimOTAAssigner, + YOLOXPoseHead, YOLOXPoseHeadModule) +from torch.nn import BatchNorm2d, SiLU +from torch.optim import AdamW + +from mmpose.datasets.transforms import LoadImage +from mmpose.evaluation import CocoMetric + +# model settings +model = dict( + type=YOLODetector, + use_syncbn=False, + init_cfg=dict( + type=PretrainedInit, + checkpoint='https://download.openmmlab.com/mmyolo/v0/yolox/' + 'yolox_s_fast_8xb32-300e-rtmdet-hyp_coco/yolox_s_fast_' + '8xb32-300e-rtmdet-hyp_coco_20230210_134645-3a8dfbd7.pth'), + data_preprocessor=dict( + type=DetDataPreprocessor, + pad_size_divisor=32, + batch_augments=[ + dict( + type=PoseBatchSyncRandomResize, + random_size_range=(480, 800), 
+ size_divisor=32, + interval=1) + ]), + backbone=dict( + type=YOLOXCSPDarknet, + deepen_factor=0.33, + widen_factor=0.5, + out_indices=(2, 3, 4), + spp_kernal_sizes=(5, 9, 13), + norm_cfg=dict(type=BatchNorm2d, momentum=0.03, eps=0.001), + act_cfg=dict(type=SiLU, inplace=True), + ), + neck=dict( + type=YOLOXPAFPN, + deepen_factor=0.33, + widen_factor=0.5, + in_channels=[256, 512, 1024], + out_channels=256, + norm_cfg=dict(type=BatchNorm2d, momentum=0.03, eps=0.001), + act_cfg=dict(type=SiLU, inplace=True)), + bbox_head=dict( + type=YOLOXPoseHead, + head_module=dict( + type=YOLOXPoseHeadModule, + num_classes=1, + in_channels=256, + feat_channels=256, + widen_factor=0.5, + stacked_convs=2, + num_keypoints=17, + featmap_strides=(8, 16, 32), + use_depthwise=False, + norm_cfg=dict(type=BatchNorm2d, momentum=0.03, eps=0.001), + act_cfg=dict(type=SiLU, inplace=True), + ), + loss_cls=dict( + type=CrossEntropyLoss, + use_sigmoid=True, + reduction='sum', + loss_weight=1.0), + loss_bbox=dict( + type=IoULoss, + mode='square', + eps=1e-16, + reduction='sum', + loss_weight=5.0), + loss_obj=dict( + type=CrossEntropyLoss, + use_sigmoid=True, + reduction='sum', + loss_weight=1.0), + loss_pose=dict( + type=OksLoss, + metainfo='configs/_base_/datasets/coco.py', + loss_weight=30.0), + loss_bbox_aux=dict(type=L1Loss, reduction='sum', loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type=PoseSimOTAAssigner, + center_radius=2.5, + iou_calculator=dict(type=BboxOverlaps2D), + oks_calculator=dict( + type=OksLoss, metainfo='configs/_base_/datasets/coco.py'))), + test_cfg=dict( + yolox_style=True, + multi_label=False, + score_thr=0.001, + max_per_img=300, + nms=dict(type=nms, iou_threshold=0.65))) + +# data related +img_scale = (640, 640) + +# pipelines +pre_transform = [ + dict(type=LoadImage, backend_args=backend_args), + dict(type=PoseToDetConverter) +] + +train_pipeline_stage1 = [ + *pre_transform, + dict( + type=Mosaic, + img_scale=img_scale, + pad_val=114.0, + 
pre_transform=pre_transform), + dict( + type=RandomAffine, + scaling_ratio_range=(0.75, 1.0), + border=(-img_scale[0] // 2, -img_scale[1] // 2)), + dict( + type=YOLOXMixUp, + img_scale=img_scale, + ratio_range=(0.8, 1.6), + pad_val=114.0, + pre_transform=pre_transform), + dict(type=YOLOXHSVRandomAug), + dict(type=RandomFlip, prob=0.5), + dict(type=FilterDetPoseAnnotations, keep_empty=False), + dict( + type=PackDetPoseInputs, + meta_keys=('img_id', 'img_path', 'ori_shape', 'img_shape')) +] + +train_pipeline_stage2 = [ + *pre_transform, + dict(type=Resize, scale=img_scale, keep_ratio=True), + dict( + type=Pad, pad_to_square=True, pad_val=dict(img=(114.0, 114.0, 114.0))), + dict(type=YOLOXHSVRandomAug), + dict(type=RandomFlip, prob=0.5), + dict(type=FilterDetPoseAnnotations, keep_empty=False), + dict(type=PackDetPoseInputs) +] + +test_pipeline = [ + *pre_transform, + dict(type=Resize, scale=img_scale, keep_ratio=True), + dict( + type=Pad, pad_to_square=True, pad_val=dict(img=(114.0, 114.0, 114.0))), + dict( + type=PackDetPoseInputs, + meta_keys=('id', 'img_id', 'img_path', 'ori_shape', 'img_shape', + 'scale_factor', 'flip_indices')) +] + +# dataset settings +dataset_type = CocoDataset +data_mode = 'bottomup' +data_root = 'data/coco/' + +train_dataloader = dict( + batch_size=32, + num_workers=8, + persistent_workers=True, + pin_memory=True, + sampler=dict(type=DefaultSampler, shuffle=True), + dataset=dict( + type=dataset_type, + data_mode=data_mode, + data_root=data_root, + ann_file='annotations/person_keypoints_train2017.json', + data_prefix=dict(img='train2017/'), + filter_cfg=dict(filter_empty_gt=False, min_size=32), + pipeline=train_pipeline_stage1)) + +val_dataloader = dict( + batch_size=1, + num_workers=2, + persistent_workers=True, + pin_memory=True, + drop_last=False, + sampler=dict(type=DefaultSampler, shuffle=False), + dataset=dict( + type=dataset_type, + data_mode=data_mode, + data_root=data_root, + ann_file='annotations/person_keypoints_val2017.json', + 
data_prefix=dict(img='val2017/'), + test_mode=True, + pipeline=test_pipeline)) + +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type=CocoMetric, + ann_file=data_root + 'annotations/person_keypoints_val2017.json', + score_mode='bbox') +test_evaluator = val_evaluator + +default_hooks.update( + dict(checkpoint=dict(save_best='coco/AP', rule='greater'))) + +# optimizer +base_lr = 0.004 +max_epochs = 300 +num_last_epochs = 20 +optim_wrapper = dict( + type=OptimWrapper, + optimizer=dict(type=AdamW, lr=base_lr, weight_decay=0.05), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +param_scheduler = [ + dict( + # use quadratic formula to warm up 5 epochs + # and lr is updated by iteration + type=QuadraticWarmupLR, + by_epoch=True, + begin=0, + end=5, + convert_to_iter_based=True), + dict( + # use cosine lr from 5 to 285 epoch + type=CosineAnnealingLR, + eta_min=base_lr * 0.05, + begin=5, + T_max=max_epochs - num_last_epochs, + end=max_epochs - num_last_epochs, + by_epoch=True, + convert_to_iter_based=True), + dict( + # use fixed lr during last num_last_epochs epochs + type=ConstantLR, + by_epoch=True, + factor=1, + begin=max_epochs - num_last_epochs, + end=max_epochs, + ) +] + +# runtime +custom_hooks = [ + dict( + type=YOLOXModeSwitchHook, + num_last_epochs=num_last_epochs, + new_train_pipeline=train_pipeline_stage2, + priority=48), + dict(type=SyncNormHook, priority=48), + dict( + type=EMAHook, + ema_type=ExpMomentumEMA, + momentum=0.0002, + update_buffers=True, + strict_load=False, + priority=49) +] + +train_cfg = dict( + type=EpochBasedTrainLoop, + max_epochs=max_epochs, + val_interval=10, + dynamic_intervals=[(max_epochs - num_last_epochs, 1)]) + +auto_scale_lr = dict(base_batch_size=256) diff --git a/projects/yolox-pose/configs/yolox-pose_l_4xb64-300e_coco.py b/projects/yolox_pose/configs/yolox-pose_l_4xb64-300e_coco.py similarity index 100% rename from 
projects/yolox-pose/configs/yolox-pose_l_4xb64-300e_coco.py rename to projects/yolox_pose/configs/yolox-pose_l_4xb64-300e_coco.py diff --git a/projects/yolox-pose/configs/yolox-pose_m_4xb64-300e_coco.py b/projects/yolox_pose/configs/yolox-pose_m_4xb64-300e_coco.py similarity index 100% rename from projects/yolox-pose/configs/yolox-pose_m_4xb64-300e_coco.py rename to projects/yolox_pose/configs/yolox-pose_m_4xb64-300e_coco.py diff --git a/projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py b/projects/yolox_pose/configs/yolox-pose_s_8xb32-300e_coco.py similarity index 100% rename from projects/yolox-pose/configs/yolox-pose_s_8xb32-300e_coco.py rename to projects/yolox_pose/configs/yolox-pose_s_8xb32-300e_coco.py diff --git a/projects/yolox-pose/configs/yolox-pose_tiny_4xb64-300e_coco.py b/projects/yolox_pose/configs/yolox-pose_tiny_4xb64-300e_coco.py similarity index 100% rename from projects/yolox-pose/configs/yolox-pose_tiny_4xb64-300e_coco.py rename to projects/yolox_pose/configs/yolox-pose_tiny_4xb64-300e_coco.py diff --git a/projects/yolox-pose/datasets/__init__.py b/projects/yolox_pose/datasets/__init__.py similarity index 100% rename from projects/yolox-pose/datasets/__init__.py rename to projects/yolox_pose/datasets/__init__.py diff --git a/projects/yolox-pose/datasets/bbox_keypoint_structure.py b/projects/yolox_pose/datasets/bbox_keypoint_structure.py similarity index 100% rename from projects/yolox-pose/datasets/bbox_keypoint_structure.py rename to projects/yolox_pose/datasets/bbox_keypoint_structure.py diff --git a/projects/yolox-pose/datasets/coco_dataset.py b/projects/yolox_pose/datasets/coco_dataset.py similarity index 100% rename from projects/yolox-pose/datasets/coco_dataset.py rename to projects/yolox_pose/datasets/coco_dataset.py diff --git a/projects/yolox-pose/datasets/transforms.py b/projects/yolox_pose/datasets/transforms.py similarity index 100% rename from projects/yolox-pose/datasets/transforms.py rename to 
projects/yolox_pose/datasets/transforms.py diff --git a/projects/yolox-pose/demo b/projects/yolox_pose/demo similarity index 100% rename from projects/yolox-pose/demo rename to projects/yolox_pose/demo diff --git a/projects/yolox-pose/models/__init__.py b/projects/yolox_pose/models/__init__.py similarity index 100% rename from projects/yolox-pose/models/__init__.py rename to projects/yolox_pose/models/__init__.py diff --git a/projects/yolox-pose/models/assigner.py b/projects/yolox_pose/models/assigner.py similarity index 100% rename from projects/yolox-pose/models/assigner.py rename to projects/yolox_pose/models/assigner.py diff --git a/projects/yolox-pose/models/data_preprocessor.py b/projects/yolox_pose/models/data_preprocessor.py similarity index 100% rename from projects/yolox-pose/models/data_preprocessor.py rename to projects/yolox_pose/models/data_preprocessor.py diff --git a/projects/yolox-pose/models/oks_loss.py b/projects/yolox_pose/models/oks_loss.py similarity index 100% rename from projects/yolox-pose/models/oks_loss.py rename to projects/yolox_pose/models/oks_loss.py diff --git a/projects/yolox-pose/models/utils.py b/projects/yolox_pose/models/utils.py similarity index 100% rename from projects/yolox-pose/models/utils.py rename to projects/yolox_pose/models/utils.py diff --git a/projects/yolox-pose/models/yolox_pose_head.py b/projects/yolox_pose/models/yolox_pose_head.py similarity index 100% rename from projects/yolox-pose/models/yolox_pose_head.py rename to projects/yolox_pose/models/yolox_pose_head.py diff --git a/projects/yolox-pose/tools b/projects/yolox_pose/tools similarity index 100% rename from projects/yolox-pose/tools rename to projects/yolox_pose/tools diff --git a/setup.cfg b/setup.cfg index 06067ee873..e3a37d1b6d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,3 +19,8 @@ known_first_party = mmpose known_third_party = 
PIL,cv2,h5py,json_tricks,matplotlib,mmcv,munkres,numpy,pytest,pytorch_sphinx_theme,requests,scipy,seaborn,spacepy,titlecase,torch,torchvision,webcam_apis,xmltodict,xtcocotools no_lines_before = STDLIB,LOCALFOLDER default_section = THIRDPARTY + +[flake8] +per-file-ignores = + mmpose/configs/*: F401,F403,F405 + projects/*/configs/*: F401,F403,F405 diff --git a/tools/misc/browse_dataset.py b/tools/misc/browse_dataset.py index 2ac50e1167..5a914476ee 100644 --- a/tools/misc/browse_dataset.py +++ b/tools/misc/browse_dataset.py @@ -83,7 +83,9 @@ def main(): backend_args = cfg.get('backend_args', dict(backend='local')) # register all modules in mmpose into the registries - init_default_scope(cfg.get('default_scope', 'mmpose')) + scope = cfg.get('default_scope', 'mmpose') + if scope is not None: + init_default_scope(scope) if args.mode == 'original': cfg[f'{args.phase}_dataloader'].dataset.pipeline = [] diff --git a/tools/train.py b/tools/train.py index e1930c20c7..1fd423ad3f 100644 --- a/tools/train.py +++ b/tools/train.py @@ -96,8 +96,9 @@ def merge_args(cfg, args): # enable automatic-mixed-precision training if args.amp is True: - optim_wrapper = cfg.optim_wrapper.get('type', 'OptimWrapper') - assert optim_wrapper in ['OptimWrapper', 'AmpOptimWrapper'], \ + from mmengine.optim import AmpOptimWrapper, OptimWrapper + optim_wrapper = cfg.optim_wrapper.get('type', OptimWrapper) + assert optim_wrapper in (OptimWrapper, AmpOptimWrapper), \ '`--amp` is not supported custom optimizer wrapper type ' \ f'`{optim_wrapper}.' 
cfg.optim_wrapper.type = 'AmpOptimWrapper' From 2af529d2b18f84558d830f14828742f1a84f844a Mon Sep 17 00:00:00 2001 From: huangjiyi <43315610+huangjiyi@users.noreply.github.com> Date: Mon, 19 Jun 2023 10:11:46 +0800 Subject: [PATCH 33/52] [Doc] fix train_and_test.md (#2467) --- docs/en/user_guides/train_and_test.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/user_guides/train_and_test.md b/docs/en/user_guides/train_and_test.md index 86a219e325..6bcc88fc3b 100644 --- a/docs/en/user_guides/train_and_test.md +++ b/docs/en/user_guides/train_and_test.md @@ -309,7 +309,7 @@ different port and visible devices. ```shell CUDA_VISIBLE_DEVICES=0,1,2,3 PORT=29500 bash ./tools/dist_test.sh ${CONFIG_FILE1} ${CHECKPOINT_FILE} 4 [PY_ARGS] -CUDA_VISIBLE_DEVICES=4,5,6,7 GPUS=29501 bash ./tools/dist_test.sh ${CONFIG_FILE2} ${CHECKPOINT_FILE} 4 [PY_ARGS] +CUDA_VISIBLE_DEVICES=4,5,6,7 PORT=29501 bash ./tools/dist_test.sh ${CONFIG_FILE2} ${CHECKPOINT_FILE} 4 [PY_ARGS] ``` ### Test with multiple machines From 4679b30de4229358eca0d3d6ea33b74b16910b4b Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Mon, 19 Jun 2023 11:39:49 +0800 Subject: [PATCH 34/52] [Fix] Fix KeypointMSELoss and add ut (#2469) --- mmpose/models/losses/heatmap_loss.py | 2 +- .../test_losses/test_heatmap_losses.py | 56 ++++++++++++++++++- 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/mmpose/models/losses/heatmap_loss.py b/mmpose/models/losses/heatmap_loss.py index a105149468..ffe5cd1e80 100644 --- a/mmpose/models/losses/heatmap_loss.py +++ b/mmpose/models/losses/heatmap_loss.py @@ -106,7 +106,7 @@ def _get_mask(self, target: Tensor, target_weights: Optional[Tensor], # Mask by ``skip_empty_channel`` if self.skip_empty_channel: - _mask = (target != 0).flatten(2).any() + _mask = (target != 0).flatten(2).any(dim=2) ndim_pad = target.ndim - _mask.ndim _mask = _mask.view(_mask.shape + (1, ) * ndim_pad) diff --git a/tests/test_models/test_losses/test_heatmap_losses.py 
b/tests/test_models/test_losses/test_heatmap_losses.py index bfabc84749..00da170389 100644 --- a/tests/test_models/test_losses/test_heatmap_losses.py +++ b/tests/test_models/test_losses/test_heatmap_losses.py @@ -4,7 +4,8 @@ import torch from mmpose.models.losses.heatmap_loss import (AdaptiveWingLoss, - FocalHeatmapLoss) + FocalHeatmapLoss, + KeypointMSELoss) class TestAdaptiveWingLoss(TestCase): @@ -63,3 +64,56 @@ def test_loss(self): loss(fake_pred, fake_label, fake_weight), torch.tensor(5.8062), atol=1e-4)) + + +class TestKeypointMSELoss(TestCase): + + def test_loss(self): + + # test loss w/o target_weight and without mask + loss = KeypointMSELoss( + use_target_weight=False, skip_empty_channel=False) + + fake_pred = torch.zeros((1, 4, 4, 4)) + fake_label = torch.zeros((1, 4, 4, 4)) + + self.assertTrue( + torch.allclose(loss(fake_pred, fake_label), torch.tensor(0.))) + + fake_pred = torch.ones((1, 4, 4, 4)) * 0.5 + fake_label = torch.ones((1, 4, 4, 4)) * 0.5 + self.assertTrue( + torch.allclose( + loss(fake_pred, fake_label), torch.tensor(0.), atol=1e-4)) + + # test loss w/ target_weight and without mask + loss = KeypointMSELoss( + use_target_weight=True, skip_empty_channel=False) + + fake_weight = torch.ones((1, 4)).float() + self.assertTrue( + torch.allclose( + loss(fake_pred, fake_label, fake_weight), + torch.tensor(0.), + atol=1e-4)) + + # test loss w/ target_weight and with mask + loss = KeypointMSELoss( + use_target_weight=True, skip_empty_channel=False) + + fake_mask = torch.ones((1, 1, 4, 4)).float() + self.assertTrue( + torch.allclose( + loss(fake_pred, fake_label, fake_weight, fake_mask), + torch.tensor(0.), + atol=1e-4)) + + # test loss w/ target_weight and skip empty channels + loss = KeypointMSELoss(use_target_weight=True, skip_empty_channel=True) + + fake_mask = torch.ones((1, 1, 4, 4)).float() + self.assertTrue( + torch.allclose( + loss(fake_pred, fake_label, fake_weight, fake_mask), + torch.tensor(0.), + atol=1e-4)) From 
1340c3aa91b0d26f1b5c3926c914f396e13c7902 Mon Sep 17 00:00:00 2001 From: Chenxi Cui <52842460+Billccx@users.noreply.github.com> Date: Mon, 19 Jun 2023 11:41:27 +0800 Subject: [PATCH 35/52] [Feature] Add visibility prediction head (#2417) --- .../datasets/transforms/common_transforms.py | 10 + mmpose/datasets/transforms/formatting.py | 10 +- mmpose/models/heads/__init__.py | 7 +- mmpose/models/heads/hybrid_heads/__init__.py | 5 +- mmpose/models/heads/hybrid_heads/vis_head.py | 229 ++++++++++++++++++ mmpose/models/losses/classification_loss.py | 9 +- mmpose/models/utils/tta.py | 15 ++ .../test_hybrid_heads/test_vis_head.py | 190 +++++++++++++++ 8 files changed, 465 insertions(+), 10 deletions(-) create mode 100644 mmpose/models/heads/hybrid_heads/vis_head.py create mode 100644 tests/test_models/test_heads/test_hybrid_heads/test_vis_head.py diff --git a/mmpose/datasets/transforms/common_transforms.py b/mmpose/datasets/transforms/common_transforms.py index e005a3c390..87068246f8 100644 --- a/mmpose/datasets/transforms/common_transforms.py +++ b/mmpose/datasets/transforms/common_transforms.py @@ -1031,6 +1031,16 @@ def transform(self, results: Dict) -> Optional[dict]: results.update(encoded) + if results.get('keypoint_weights', None) is not None: + results['transformed_keypoints_visible'] = results[ + 'keypoint_weights'] + elif results.get('keypoints', None) is not None: + results['transformed_keypoints_visible'] = results[ + 'keypoints_visible'] + else: + raise ValueError('GenerateTarget requires \'keypoint_weights\' or' + ' \'keypoints_visible\' in the results.') + return results def __repr__(self) -> str: diff --git a/mmpose/datasets/transforms/formatting.py b/mmpose/datasets/transforms/formatting.py index 403147120d..05aeef179f 100644 --- a/mmpose/datasets/transforms/formatting.py +++ b/mmpose/datasets/transforms/formatting.py @@ -129,7 +129,8 @@ class PackPoseInputs(BaseTransform): 'keypoint_x_labels': 'keypoint_x_labels', 'keypoint_y_labels': 'keypoint_y_labels', 
'keypoint_weights': 'keypoint_weights', - 'instance_coords': 'instance_coords' + 'instance_coords': 'instance_coords', + 'transformed_keypoints_visible': 'keypoints_visible', } # items in `field_mapping_table` will be packed into @@ -196,6 +197,10 @@ def transform(self, results: dict) -> dict: if self.pack_transformed and 'transformed_keypoints' in results: gt_instances.set_field(results['transformed_keypoints'], 'transformed_keypoints') + if self.pack_transformed and \ + 'transformed_keypoints_visible' in results: + gt_instances.set_field(results['transformed_keypoints_visible'], + 'transformed_keypoints_visible') data_sample.gt_instances = gt_instances @@ -205,7 +210,8 @@ def transform(self, results: dict) -> dict: if key in results: # For pose-lifting, store only target-related fields if 'lifting_target_label' in results and key in { - 'keypoint_labels', 'keypoint_weights' + 'keypoint_labels', 'keypoint_weights', + 'transformed_keypoints_visible' }: continue if isinstance(results[key], list): diff --git a/mmpose/models/heads/__init__.py b/mmpose/models/heads/__init__.py index 75a626569b..e01f2269e3 100644 --- a/mmpose/models/heads/__init__.py +++ b/mmpose/models/heads/__init__.py @@ -3,7 +3,7 @@ from .coord_cls_heads import RTMCCHead, SimCCHead from .heatmap_heads import (AssociativeEmbeddingHead, CIDHead, CPMHead, HeatmapHead, MSPNHead, ViPNASHead) -from .hybrid_heads import DEKRHead +from .hybrid_heads import DEKRHead, VisPredictHead from .regression_heads import (DSNTHead, IntegralRegressionHead, RegressionHead, RLEHead, TemporalRegressionHead, TrajectoryRegressionHead) @@ -11,6 +11,7 @@ __all__ = [ 'BaseHead', 'HeatmapHead', 'CPMHead', 'MSPNHead', 'ViPNASHead', 'RegressionHead', 'IntegralRegressionHead', 'SimCCHead', 'RLEHead', - 'DSNTHead', 'AssociativeEmbeddingHead', 'DEKRHead', 'CIDHead', 'RTMCCHead', - 'TemporalRegressionHead', 'TrajectoryRegressionHead' + 'DSNTHead', 'AssociativeEmbeddingHead', 'DEKRHead', 'VisPredictHead', + 'CIDHead', 'RTMCCHead', 
'TemporalRegressionHead', + 'TrajectoryRegressionHead' ] diff --git a/mmpose/models/heads/hybrid_heads/__init__.py b/mmpose/models/heads/hybrid_heads/__init__.py index 55d5a211c1..6431b6a2c2 100644 --- a/mmpose/models/heads/hybrid_heads/__init__.py +++ b/mmpose/models/heads/hybrid_heads/__init__.py @@ -1,6 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. from .dekr_head import DEKRHead +from .vis_head import VisPredictHead -__all__ = [ - 'DEKRHead', -] +__all__ = ['DEKRHead', 'VisPredictHead'] diff --git a/mmpose/models/heads/hybrid_heads/vis_head.py b/mmpose/models/heads/hybrid_heads/vis_head.py new file mode 100644 index 0000000000..e9ea271ac5 --- /dev/null +++ b/mmpose/models/heads/hybrid_heads/vis_head.py @@ -0,0 +1,229 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Tuple, Union + +import torch +from torch import Tensor, nn + +from mmpose.models.utils.tta import flip_visibility +from mmpose.registry import MODELS +from mmpose.utils.tensor_utils import to_numpy +from mmpose.utils.typing import (ConfigType, InstanceList, OptConfigType, + OptSampleList, Predictions) +from ..base_head import BaseHead + + +@MODELS.register_module() +class VisPredictHead(BaseHead): + """VisPredictHead must be used together with other heads. It can predict + keypoints coordinates of and their visibility simultaneously. In the + current version, it only supports top-down approaches. + + Args: + pose_cfg (Config): Config to construct keypoints prediction head + loss (Config): Config for visibility loss. Defaults to use + :class:`BCELoss` + use_sigmoid (bool): Whether to use sigmoid activation function + init_cfg (Config, optional): Config to control the initialization. 
See + :attr:`default_init_cfg` for default settings + """ + + def __init__(self, + pose_cfg: ConfigType, + loss: ConfigType = dict( + type='BCELoss', use_target_weight=False, + with_logits=True), + use_sigmoid: bool = False, + init_cfg: OptConfigType = None): + + if init_cfg is None: + init_cfg = self.default_init_cfg + + super().__init__(init_cfg) + + self.in_channels = pose_cfg['in_channels'] + if pose_cfg.get('num_joints', None) is not None: + self.out_channels = pose_cfg['num_joints'] + elif pose_cfg.get('out_channels', None) is not None: + self.out_channels = pose_cfg['out_channels'] + else: + raise ValueError('VisPredictHead requires \'num_joints\' or' + ' \'out_channels\' in the pose_cfg.') + + self.loss_module = MODELS.build(loss) + + self.pose_head = MODELS.build(pose_cfg) + self.pose_cfg = pose_cfg + + self.use_sigmoid = use_sigmoid + + modules = [ + nn.AdaptiveAvgPool2d(1), + nn.Flatten(), + nn.Linear(self.in_channels, self.out_channels) + ] + if use_sigmoid: + modules.append(nn.Sigmoid()) + + self.vis_head = nn.Sequential(*modules) + + def vis_forward(self, feats: Tuple[Tensor]): + """Forward the vis_head. The input is multi scale feature maps and the + output is coordinates visibility. + + Args: + feats (Tuple[Tensor]): Multi scale feature maps. + + Returns: + Tensor: output coordinates visibility. + """ + x = feats[-1] + while len(x.shape) < 4: + x.unsqueeze_(-1) + x = self.vis_head(x) + return x.reshape(-1, self.out_channels) + + def forward(self, feats: Tuple[Tensor]): + """Forward the network. The input is multi scale feature maps and the + output is coordinates and coordinates visibility. + + Args: + feats (Tuple[Tensor]): Multi scale feature maps. + + Returns: + Tuple[Tensor]: output coordinates and coordinates visibility. 
+ """ + x_pose = self.pose_head.forward(feats) + x_vis = self.vis_forward(feats) + + return x_pose, x_vis + + def integrate(self, batch_vis: Tensor, + pose_preds: Union[Tuple, Predictions]) -> InstanceList: + """Add keypoints visibility prediction to pose prediction. + + Overwrite the original keypoint_scores. + """ + if isinstance(pose_preds, tuple): + pose_pred_instances, pose_pred_fields = pose_preds + else: + pose_pred_instances = pose_preds + pose_pred_fields = None + + batch_vis_np = to_numpy(batch_vis, unzip=True) + + assert len(pose_pred_instances) == len(batch_vis_np) + for index, _ in enumerate(pose_pred_instances): + pose_pred_instances[index].keypoint_scores = batch_vis_np[index] + + return pose_pred_instances, pose_pred_fields + + def predict(self, + feats: Tuple[Tensor], + batch_data_samples: OptSampleList, + test_cfg: ConfigType = {}) -> Predictions: + """Predict results from features. + + Args: + feats (Tuple[Tensor] | List[Tuple[Tensor]]): The multi-stage + features (or multiple multi-stage features in TTA) + batch_data_samples (List[:obj:`PoseDataSample`]): The batch + data samples + test_cfg (dict): The runtime config for testing process. Defaults + to {} + + Returns: + Union[InstanceList | Tuple[InstanceList | PixelDataList]]: If + posehead's ``test_cfg['output_heatmap']==True``, return both + pose and heatmap prediction; otherwise only return the pose + prediction. 
+ + The pose prediction is a list of ``InstanceData``, each contains + the following fields: + + - keypoints (np.ndarray): predicted keypoint coordinates in + shape (num_instances, K, D) where K is the keypoint number + and D is the keypoint dimension + - keypoint_scores (np.ndarray): predicted keypoint scores in + shape (num_instances, K) + - keypoint_visibility (np.ndarray): predicted keypoints + visibility in shape (num_instances, K) + + The heatmap prediction is a list of ``PixelData``, each contains + the following fields: + + - heatmaps (Tensor): The predicted heatmaps in shape (K, h, w) + """ + if test_cfg.get('flip_test', False): + # TTA: flip test -> feats = [orig, flipped] + assert isinstance(feats, list) and len(feats) == 2 + flip_indices = batch_data_samples[0].metainfo['flip_indices'] + _feats, _feats_flip = feats + + _batch_vis = self.vis_forward(_feats) + _batch_vis_flip = flip_visibility( + self.vis_forward(_feats_flip), flip_indices=flip_indices) + batch_vis = (_batch_vis + _batch_vis_flip) * 0.5 + else: + batch_vis = self.vis_forward(feats) # (B, K, D) + + batch_vis.unsqueeze_(dim=1) # (B, N, K, D) + + if not self.use_sigmoid: + batch_vis = torch.sigmoid(batch_vis) + + batch_pose = self.pose_head.predict(feats, batch_data_samples, + test_cfg) + + return self.integrate(batch_vis, batch_pose) + + def vis_accuracy(self, vis_pred_outputs, vis_labels): + """Calculate visibility prediction accuracy.""" + probabilities = torch.sigmoid(torch.flatten(vis_pred_outputs)) + threshold = 0.5 + predictions = (probabilities >= threshold).int() + labels = torch.flatten(vis_labels) + correct = torch.sum(predictions == labels).item() + accuracy = correct / len(labels) + return torch.tensor(accuracy) + + def loss(self, + feats: Tuple[Tensor], + batch_data_samples: OptSampleList, + train_cfg: OptConfigType = {}) -> dict: + """Calculate losses from a batch of inputs and data samples. 
+ + Args: + feats (Tuple[Tensor]): The multi-stage features + batch_data_samples (List[:obj:`PoseDataSample`]): The batch + data samples + train_cfg (dict): The runtime config for training process. + Defaults to {} + + Returns: + dict: A dictionary of losses. + """ + vis_pred_outputs = self.vis_forward(feats) + vis_labels = torch.cat([ + d.gt_instance_labels.keypoint_weights for d in batch_data_samples + ]) + + # calculate vis losses + losses = dict() + loss_vis = self.loss_module(vis_pred_outputs, vis_labels) + + losses.update(loss_vis=loss_vis) + + # calculate vis accuracy + acc_vis = self.vis_accuracy(vis_pred_outputs, vis_labels) + losses.update(acc_vis=acc_vis) + + # calculate keypoints losses + loss_kpt = self.pose_head.loss(feats, batch_data_samples) + losses.update(loss_kpt) + + return losses + + @property + def default_init_cfg(self): + init_cfg = [dict(type='Normal', layer=['Linear'], std=0.01, bias=0)] + return init_cfg diff --git a/mmpose/models/losses/classification_loss.py b/mmpose/models/losses/classification_loss.py index 6c3bdf502b..4605acabd3 100644 --- a/mmpose/models/losses/classification_loss.py +++ b/mmpose/models/losses/classification_loss.py @@ -14,11 +14,16 @@ class BCELoss(nn.Module): use_target_weight (bool): Option to use weighted loss. Different joint types may have different target weights. loss_weight (float): Weight of the loss. Default: 1.0. + with_logits (bool): Whether to use BCEWithLogitsLoss. Default: False. 
""" - def __init__(self, use_target_weight=False, loss_weight=1.): + def __init__(self, + use_target_weight=False, + loss_weight=1., + with_logits=False): super().__init__() - self.criterion = F.binary_cross_entropy + self.criterion = F.binary_cross_entropy if not with_logits\ + else F.binary_cross_entropy_with_logits self.use_target_weight = use_target_weight self.loss_weight = loss_weight diff --git a/mmpose/models/utils/tta.py b/mmpose/models/utils/tta.py index 0add48a422..41d2f2fd47 100644 --- a/mmpose/models/utils/tta.py +++ b/mmpose/models/utils/tta.py @@ -114,6 +114,21 @@ def flip_coordinates(coords: Tensor, flip_indices: List[int], return coords +def flip_visibility(vis: Tensor, flip_indices: List[int]): + """Flip keypoints visibility for test-time augmentation. + + Args: + vis (Tensor): The keypoints visibility to flip. Should be a tensor + in shape [B, K] + flip_indices (List[int]): The indices of each keypoint's symmetric + keypoint + """ + assert vis.ndim == 2 + + vis = vis[:, flip_indices] + return vis + + def aggregate_heatmaps(heatmaps: List[Tensor], size: Optional[Tuple[int, int]], align_corners: bool = False, diff --git a/tests/test_models/test_heads/test_hybrid_heads/test_vis_head.py b/tests/test_models/test_heads/test_hybrid_heads/test_vis_head.py new file mode 100644 index 0000000000..a6aecc2852 --- /dev/null +++ b/tests/test_models/test_heads/test_hybrid_heads/test_vis_head.py @@ -0,0 +1,190 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import unittest +from typing import List, Tuple +from unittest import TestCase + +import torch +from mmengine.structures import InstanceData, PixelData +from torch import nn + +from mmpose.models.heads import VisPredictHead +from mmpose.testing import get_packed_inputs + + +class TestVisPredictHead(TestCase): + + def _get_feats( + self, + batch_size: int = 2, + feat_shapes: List[Tuple[int, int, int]] = [(32, 8, 6)], + ): + feats = [ + torch.rand((batch_size, ) + shape, dtype=torch.float32) + for shape in feat_shapes + ] + return feats + + def test_init(self): + codec = dict( + type='MSRAHeatmap', + input_size=(192, 256), + heatmap_size=(48, 64), + sigma=2.) + + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec)) + + self.assertTrue(isinstance(head.vis_head, nn.Sequential)) + self.assertEqual(head.vis_head[2].weight.shape, (17, 32)) + self.assertIsNotNone(head.pose_head) + + def test_forward(self): + + codec = dict( + type='MSRAHeatmap', + input_size=(192, 256), + heatmap_size=(48, 64), + sigma=2) + + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec)) + + feats = [torch.rand(1, 32, 128, 128)] + output_pose, output_vis = head.forward(feats) + + self.assertIsInstance(output_pose, torch.Tensor) + self.assertEqual(output_pose.shape, (1, 17, 128, 128)) + + self.assertIsInstance(output_vis, torch.Tensor) + self.assertEqual(output_vis.shape, (1, 17)) + + def test_predict(self): + + codec = dict( + type='MSRAHeatmap', + input_size=(192, 256), + heatmap_size=(48, 64), + sigma=2.) 
+ + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec)) + + feats = self._get_feats(batch_size=2, feat_shapes=[(32, 128, 128)]) + batch_data_samples = get_packed_inputs(batch_size=2)['data_samples'] + + preds, _ = head.predict(feats, batch_data_samples) + + self.assertTrue(len(preds), 2) + self.assertIsInstance(preds[0], InstanceData) + self.assertEqual(preds[0].keypoints.shape, + batch_data_samples[0].gt_instances.keypoints.shape) + self.assertEqual( + preds[0].keypoint_scores.shape, + batch_data_samples[0].gt_instance_labels.keypoint_weights.shape) + + # output heatmap + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + decoder=codec)) + feats = self._get_feats(batch_size=2, feat_shapes=[(32, 8, 6)]) + batch_data_samples = get_packed_inputs(batch_size=2)['data_samples'] + _, pred_heatmaps = head.predict( + feats, batch_data_samples, test_cfg=dict(output_heatmaps=True)) + + self.assertIsInstance(pred_heatmaps[0], PixelData) + self.assertEqual(pred_heatmaps[0].heatmaps.shape, (17, 64, 48)) + + def test_tta(self): + # flip test: vis and heatmap + decoder_cfg = dict( + type='MSRAHeatmap', + input_size=(192, 256), + heatmap_size=(48, 64), + sigma=2.) 
+ + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + decoder=decoder_cfg)) + + feats = self._get_feats(batch_size=2, feat_shapes=[(32, 8, 6)]) + batch_data_samples = get_packed_inputs(batch_size=2)['data_samples'] + preds, _ = head.predict([feats, feats], + batch_data_samples, + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + + self.assertTrue(len(preds), 2) + self.assertIsInstance(preds[0], InstanceData) + self.assertEqual(preds[0].keypoints.shape, + batch_data_samples[0].gt_instances.keypoints.shape) + self.assertEqual( + preds[0].keypoint_scores.shape, + batch_data_samples[0].gt_instance_labels.keypoint_weights.shape) + + def test_loss(self): + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + )) + + feats = self._get_feats(batch_size=2, feat_shapes=[(32, 8, 6)]) + batch_data_samples = get_packed_inputs(batch_size=2)['data_samples'] + losses = head.loss(feats, batch_data_samples) + self.assertIsInstance(losses['loss_kpt'], torch.Tensor) + self.assertEqual(losses['loss_kpt'].shape, torch.Size(())) + self.assertIsInstance(losses['acc_pose'], torch.Tensor) + + self.assertIsInstance(losses['loss_vis'], torch.Tensor) + self.assertEqual(losses['loss_vis'].shape, torch.Size(())) + self.assertIsInstance(losses['acc_vis'], torch.Tensor) + + head = VisPredictHead( + pose_cfg=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + )) + + feats = self._get_feats(batch_size=2, feat_shapes=[(32, 8, 6)]) + batch_data_samples = get_packed_inputs(batch_size=2)['data_samples'] + losses = head.loss(feats, batch_data_samples) + self.assertIsInstance(losses['loss_kpt'], torch.Tensor) + self.assertEqual(losses['loss_kpt'].shape, torch.Size(())) + self.assertIsInstance(losses['acc_pose'], torch.Tensor) + + self.assertIsInstance(losses['loss_vis'], torch.Tensor) + self.assertEqual(losses['loss_vis'].shape, torch.Size(())) + 
self.assertIsInstance(losses['acc_vis'], torch.Tensor) + + +if __name__ == '__main__': + unittest.main() From ce73df8dd3d99ebc89385c0eff79ca0f0c9d1ed2 Mon Sep 17 00:00:00 2001 From: Tau Date: Mon, 19 Jun 2023 15:51:32 +0800 Subject: [PATCH 36/52] [Docs] Update docs (#2471) --- README.md | 4 ++-- README_CN.md | 2 +- requirements/build.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 80a975e35a..af54288892 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ [![actions](https://github.com/open-mmlab/mmpose/workflows/build/badge.svg)](https://github.com/open-mmlab/mmpose/actions) [![codecov](https://codecov.io/gh/open-mmlab/mmpose/branch/latest/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmpose) [![PyPI](https://img.shields.io/pypi/v/mmpose)](https://pypi.org/project/mmpose/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/blob/master/LICENSE) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/blob/main/LICENSE) [![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/issues) [![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/issues) @@ -63,7 +63,7 @@ English | [简体中文](README_CN.md) MMPose is an open-source toolbox for pose estimation based on PyTorch. It is a part of the [OpenMMLab project](https://github.com/open-mmlab). -The master branch works with **PyTorch 1.8+**. +The main branch works with **PyTorch 1.8+**. 
https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb-84f6-24eeddbf4d91.mp4 diff --git a/README_CN.md b/README_CN.md index 2c0c725fa3..80c4b39aad 100644 --- a/README_CN.md +++ b/README_CN.md @@ -22,7 +22,7 @@ [![actions](https://github.com/open-mmlab/mmpose/workflows/build/badge.svg)](https://github.com/open-mmlab/mmpose/actions) [![codecov](https://codecov.io/gh/open-mmlab/mmpose/branch/latest/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmpose) [![PyPI](https://img.shields.io/pypi/v/mmpose)](https://pypi.org/project/mmpose/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/blob/master/LICENSE) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/blob/main/LICENSE) [![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/issues) [![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmpose.svg)](https://github.com/open-mmlab/mmpose/issues) diff --git a/requirements/build.txt b/requirements/build.txt index aa617a4ec0..fb44aadd43 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,3 +1,3 @@ # These must be installed before building mmpose numpy -torch>=1.6 +torch>=1.8 From ed79b174178f48aec1b8fbf27bc49d104763e7a2 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Wed, 21 Jun 2023 10:18:36 +0800 Subject: [PATCH 37/52] [Fix] Update to mmpretrain (#2473) --- .../coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py | 4 ++-- .../coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py | 4 ++-- .../coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py | 4 ++-- .../coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py | 4 ++-- .../td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py | 4 ++-- .../coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py | 4 ++-- 
.../td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py | 4 ++-- .../coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py | 4 ++-- configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md | 4 ++-- .../td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py | 4 ++-- .../td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py | 4 ++-- .../topdown_heatmap/humanart/vitpose_humanart.md | 4 ++-- 12 files changed, 24 insertions(+), 24 deletions(-) diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py index 13eb5f373a..9732371787 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='base', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.3, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py index 8725fa2ca0..fc08c61dff 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='base', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.3, with_cls_token=False, - output_cls_token=False, + 
out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py index 9539de25c4..7d94f97c1b 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='huge', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.55, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py index 1953188a19..4aa2c21c1f 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='huge', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.55, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py index 8086b09410..cf875d5167 100644 --- 
a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='large', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.5, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py index 43d5df7154..5ba6eafb4b 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='large', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.5, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py index b57b0d3735..88bd3e43e3 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py @@ -59,7 +59,7 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + 
type='mmpretrain.VisionTransformer', arch={ 'embed_dims': 384, 'num_layers': 12, @@ -71,7 +71,7 @@ qkv_bias=True, drop_path_rate=0.1, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py index 5d08a31a02..791f9b5945 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small_8xb64-210e_coco-256x192.py @@ -59,7 +59,7 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch={ 'embed_dims': 384, 'num_layers': 12, @@ -71,7 +71,7 @@ qkv_bias=True, drop_path_rate=0.1, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md index 409a5bf28b..07563f89be 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md @@ -1,7 +1,7 @@ -To utilize ViTPose, you'll need to have [MMClassification](https://github.com/open-mmlab/mmclassification). To install the required version, run the following command: +To utilize ViTPose, you'll need to have [MMPreTrain](https://github.com/open-mmlab/mmpretrain). 
To install the required version, run the following command: ```shell -mim install 'mmcls>=1.0.0rc6' +mim install 'mmpretrain>=1.0.0' ``` diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py index 6f08f404fb..4aa431e044 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py @@ -59,14 +59,14 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch='base', img_size=(256, 192), patch_size=16, qkv_bias=True, drop_path_rate=0.3, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py index 6daf87cc90..ed7817d2fe 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py @@ -59,7 +59,7 @@ std=[58.395, 57.12, 57.375], bgr_to_rgb=True), backbone=dict( - type='mmcls.VisionTransformer', + type='mmpretrain.VisionTransformer', arch={ 'embed_dims': 384, 'num_layers': 12, @@ -71,7 +71,7 @@ qkv_bias=True, drop_path_rate=0.1, with_cls_token=False, - output_cls_token=False, + out_type='featmap', patch_cfg=dict(padding=2), init_cfg=dict( type='Pretrained', diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md index 
1e559aa4da..962dd326f3 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md @@ -1,7 +1,7 @@ -To utilize ViTPose, you'll need to have [MMClassification](https://github.com/open-mmlab/mmclassification). To install the required version, run the following command: +To utilize ViTPose, you'll need to have [MMPreTrain](https://github.com/open-mmlab/mmpretrain). To install the required version, run the following command: ```shell -mim install 'mmcls>=1.0.0rc5' +mim install 'mmpretrain>=1.0.0' ``` From a0ca076ec2ef708846e0d445640076f0bb1c06d1 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Mon, 26 Jun 2023 23:24:34 +0800 Subject: [PATCH 38/52] [Fix] Fix type bug in keypoint_pck_accuracy (#2479) --- mmpose/evaluation/functional/keypoint_eval.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmpose/evaluation/functional/keypoint_eval.py b/mmpose/evaluation/functional/keypoint_eval.py index 3c689f3b00..847faaf6d8 100644 --- a/mmpose/evaluation/functional/keypoint_eval.py +++ b/mmpose/evaluation/functional/keypoint_eval.py @@ -99,7 +99,7 @@ def keypoint_pck_accuracy(pred: np.ndarray, gt: np.ndarray, mask: np.ndarray, acc = np.array([_distance_acc(d, thr) for d in distances]) valid_acc = acc[acc >= 0] cnt = len(valid_acc) - avg_acc = valid_acc.mean() if cnt > 0 else 0 + avg_acc = valid_acc.mean() if cnt > 0 else 0.0 return acc, avg_acc, cnt From 11f29667fbea835ff151ec4d538076ee31645929 Mon Sep 17 00:00:00 2001 From: Xuan Ju <89566272+juxuan27@users.noreply.github.com> Date: Tue, 27 Jun 2023 01:49:14 +0800 Subject: [PATCH 39/52] [Enhance] Update more models trained on Human-Art Dataset (#2487) --- .../rtmpose-t_8xb256-420e_humanart-256x192.py | 233 ++++++++++++++++++ .../rtmpose/humanart/rtmpose_humanart.md | 7 + .../rtmpose/humanart/rtmpose_humanart.yml | 32 +++ .../humanart/hrnet_humanart.md | 80 ++++++ .../humanart/hrnet_humanart.yml | 74 ++++++ 
...iTPose-huge_8xb64-210e_humanart-256x192.py | 150 +++++++++++ ...TPose-large_8xb64-210e_humanart-256x192.py | 150 +++++++++++ ...m_hrnet-w32_8xb64-210e_humanart-256x192.py | 150 +++++++++++ ...m_hrnet-w48_8xb32-210e_humanart-256x192.py | 150 +++++++++++ .../humanart/vitpose_humanart.md | 12 + .../humanart/vitpose_humanart.yml | 66 +++++ 11 files changed, 1104 insertions(+) create mode 100644 configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.md create mode 100755 configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.yml create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py create mode 100644 configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py new file mode 100644 index 0000000000..869f04217d --- /dev/null +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py @@ -0,0 +1,233 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +max_epochs = 420 +stage2_num_epochs = 30 +base_lr = 4e-3 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + 
by_epoch=False, + begin=0, + end=1000), + dict( + # use cosine lr from 210 to 420 epoch + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=(192, 256), + sigma=(4.9, 5.66), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=0.167, + widen_factor=0.375, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmpose/cspnext-tiny_udp-aic-coco_210e-256x192-cbed682d_20230130.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=384, + out_channels=17, + input_size=codec['input_size'], + in_featuremap_size=(6, 8), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset settings +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# 
f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.75, 1.25], + rotate_factor=60), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=256, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + 
ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=64, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + # bbox_file=f'{data_root}HumanArt/person_detection_results/' + # 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + # Turn off EMA while training the tiny model + # dict( + # type='EMAHook', + # ema_type='ExpMomentumEMA', + # momentum=0.0002, + # update_buffers=True, + # priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md index bfd925b2c8..385ce0612a 100644 --- a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.md @@ -72,6 +72,8 @@ Results on Human-Art validation dataset with detector having human AP of 56.2 on | Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | | :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| 
[rtmpose-t-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 0.161 | 0.283 | 0.154 | 0.221 | 0.373 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.json) | +| [rtmpose-t-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py) | 256x192 | 0.249 | 0.395 | 0.256 | 0.323 | 0.485 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.json) | | [rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.199 | 0.328 | 0.198 | 0.261 | 0.418 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | | [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.311 | 0.462 | 0.323 | 0.381 | 0.540 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | | [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.239 | 0.372 | 0.243 | 0.302 | 0.455 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | @@ -83,6 +85,8 @@ Results on Human-Art validation dataset with ground-truth bounding-box | Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | | :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-t-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 0.444 | 0.725 | 0.453 | 0.488 | 0.750 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.json) | +| [rtmpose-t-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py) | 256x192 | 0.655 | 0.872 | 0.720 | 0.693 | 0.890 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.json) | | [rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.480 | 0.739 | 0.498 | 0.521 | 0.763 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | | [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.698 | 0.893 | 0.768 | 0.732 | 0.903 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | | [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.532 | 0.765 | 0.563 | 0.571 | 0.789 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | @@ -94,6 +98,8 @@ Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 da | Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | | :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-t-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-t_8xb256-420e_coco-256x192.py) | 256x192 | 0.682 | 0.883 | 0.759 | 0.736 | 0.920 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-tiny_simcc-coco_pt-aic-coco_420e-256x192-e613ba3f_20230127.json) | +| [rtmpose-t-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py) | 256x192 | 0.665 | 0.875 | 0.739 | 0.721 | 0.916 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.json) | | 
[rtmpose-s-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-s_8xb256-420e_coco-256x192.py) | 256x192 | 0.716 | 0.892 | 0.789 | 0.768 | 0.929 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_simcc-coco_pt-aic-coco_420e-256x192-8edcf0d7_20230127.json) | | [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.706 | 0.888 | 0.780 | 0.759 | 0.928 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | | [rtmpose-m-coco](/configs/body_2d_keypoint/rtmpose/coco/rtmpose-m_8xb256-420e_coco-256x192.py) | 256x192 | 0.746 | 0.899 | 0.817 | 0.795 | 0.935 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco_pt-aic-coco_420e-256x192-d8dd5ca4_20230127.json) | @@ -105,6 +111,7 @@ Results on COCO val2017 with ground-truth bounding box | Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | | :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [rtmpose-t-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py) | 256x192 | 0.679 | 0.895 | 0.755 | 0.710 | 0.907 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.json) | | [rtmpose-s-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py) | 256x192 | 0.725 | 0.916 | 0.798 | 0.753 | 0.925 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.json) | | [rtmpose-m-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-m_8xb256-420e_humanart-256x192.py) | 256x192 | 0.744 | 0.916 | 0.818 | 0.770 | 0.930 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_8xb256-420e_humanart-256x192-8430627b_20230611.json) | | [rtmpose-l-humanart-coco](/configs/body_2d_keypoint/rtmpose/humanart/rtmpose-l_8xb256-420e_humanart-256x192.py) | 256x192 | 0.770 | 0.927 | 0.840 | 0.794 | 0.939 | [ckpt](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_8xb256-420e_humanart-256x192-389f2cb0_20230611.json) | diff --git a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml index f0f21b2d6f..2d6cf6ff26 100644 --- a/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml +++ b/configs/body_2d_keypoint/rtmpose/humanart/rtmpose_humanart.yml @@ -104,3 +104,35 @@ Models: AR@0.5: 0.903 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-s_8xb256-420e_humanart-256x192-5a3ac943_20230611.pth +- Config: 
configs/body_2d_keypoint/rtmpose/humanart/rtmpose-t_8xb256-420e_humanart-256x192.py + In Collection: RTMPose + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: rtmpose-t_8xb256-420e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.665 + AP@0.5: 0.875 + AP@0.75: 0.739 + AR: 0.721 + AR@0.5: 0.916 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.249 + AP@0.5: 0.395 + AP@0.75: 0.256 + AR: 0.323 + AR@0.5: 0.485 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.655 + AP@0.5: 0.872 + AP@0.75: 0.720 + AR: 0.693 + AR@0.5: 0.890 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-t_8xb256-420e_humanart-256x192-60b68c98_20230612.pth diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.md b/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.md new file mode 100644 index 0000000000..6e5f3476cb --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.md @@ -0,0 +1,80 @@ + + +
    +HRNet (CVPR'2019) + +```bibtex +@inproceedings{sun2019deep, + title={Deep high-resolution representation learning for human pose estimation}, + author={Sun, Ke and Xiao, Bin and Liu, Dong and Wang, Jingdong}, + booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, + pages={5693--5703}, + year={2019} +} +``` + +
    + + + +
    +COCO (ECCV'2014) + +```bibtex +@inproceedings{lin2014microsoft, + title={Microsoft coco: Common objects in context}, + author={Lin, Tsung-Yi and Maire, Michael and Belongie, Serge and Hays, James and Perona, Pietro and Ramanan, Deva and Doll{\'a}r, Piotr and Zitnick, C Lawrence}, + booktitle={European conference on computer vision}, + pages={740--755}, + year={2014}, + organization={Springer} +} +``` + +
    + +
    +Human-Art (CVPR'2023) + +```bibtex +@inproceedings{ju2023humanart, + title={Human-Art: A Versatile Human-Centric Dataset Bridging Natural and Artificial Scenes}, + author={Ju, Xuan and Zeng, Ailing and Jianan, Wang and Qiang, Xu and Lei, Zhang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), + year={2023}} +``` + +
    + +Results on Human-Art validation dataset with detector having human AP of 56.2 on Human-Art validation dataset + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [pose_hrnet_w32-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192.py) | 256x192 | 0.252 | 0.397 | 0.255 | 0.321 | 0.485 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192-81c58e40_20220909.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192_20220909.log) | +| [pose_hrnet_w32-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py) | 256x192 | 0.399 | 0.545 | 0.420 | 0.466 | 0.613 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.json) | +| [pose_hrnet_w48-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192.py) | 256x192 | 0.271 | 0.413 | 0.277 | 0.339 | 0.499 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192-0e67c616_20220913.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192_20220913.log) | +| [pose_hrnet_w48-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py) | 
256x192 | 0.417 | 0.553 | 0.442 | 0.481 | 0.617 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.json) | + +Results on Human-Art validation dataset with ground-truth bounding-box + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [pose_hrnet_w32-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192.py) | 256x192 | 0.533 | 0.771 | 0.562 | 0.574 | 0.792 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192-81c58e40_20220909.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192_20220909.log) | +| [pose_hrnet_w32-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py) | 256x192 | 0.754 | 0.906 | 0.812 | 0.783 | 0.916 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.json) | +| [pose_hrnet_w48-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192.py) | 256x192 | 0.557 | 0.782 | 0.593 | 0.595 | 0.804 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192-0e67c616_20220913.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192_20220913.log) | +| [pose_hrnet_w48-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py) | 256x192 | 0.769 | 0.906 | 0.825 | 0.796 | 0.919 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.json) | + +Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 dataset + +> With classic decoder + +| Arch | Input Size | AP | AP50 | AP75 | AR | AR50 | ckpt | log | +| :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | +| [pose_hrnet_w32-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192.py) | 256x192 | 0.749 | 0.906 | 0.821 | 0.804 | 0.945 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192-81c58e40_20220909.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192_20220909.log) | +| [pose_hrnet_w32-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py) | 256x192 | 0.741 | 0.902 | 0.814 | 0.795 | 0.941 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.json) | +| [pose_hrnet_w48-coco](configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192.py) | 256x192 | 0.756 | 0.908 | 0.826 | 0.809 | 0.945 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192-0e67c616_20220913.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192_20220913.log) | +| [pose_hrnet_w48-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py) | 256x192 | 0.751 | 0.905 | 0.822 | 0.805 | 0.943 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.json) | diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.yml b/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.yml new file mode 100755 index 0000000000..08aa3f1f47 --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/hrnet_humanart.yml @@ -0,0 +1,74 @@ +Collections: +- Name: HRNet + Paper: + Title: Deep high-resolution representation learning for human pose estimation + URL: http://openaccess.thecvf.com/content_CVPR_2019/html/Sun_Deep_High-Resolution_Representation_Learning_for_Human_Pose_Estimation_CVPR_2019_paper.html + README: https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/backbones/hrnet.md +Models: +- Config: 
configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py + In Collection: HRNet + Metadata: + Architecture: &id001 + - HRNet + Training Data: &id002 + - COCO + - Human-Art + Name: td-hm_hrnet-w32_8xb64-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.741 + AP@0.5: 0.902 + AP@0.75: 0.814 + AR: 0.795 + AR@0.5: 0.941 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.399 + AP@0.5: 0.545 + AP@0.75: 0.420 + AR: 0.466 + AR@0.5: 0.613 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.754 + AP@0.5: 0.906 + AP@0.75: 0.812 + AR: 0.783 + AR@0.5: 0.916 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w32_8xb64-210e_humanart-256x192-0773ef0b_20230614.pth +- Config: configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py + In Collection: HRNet + Metadata: + Architecture: *id001 + Training Data: *id002 + Name: td-hm_hrnet-w48_8xb32-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.751 + AP@0.5: 0.905 + AP@0.75: 0.822 + AR: 0.805 + AR@0.5: 0.943 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.417 + AP@0.5: 0.553 + AP@0.75: 0.442 + AR: 0.481 + AR@0.5: 0.617 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.769 + AP@0.5: 0.906 + AP@0.75: 0.825 + AR: 0.796 + AR@0.5: 0.919 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_hrnet-w48_8xb32-210e_humanart-256x192-05178983_20230614.pth diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py new file mode 100644 index 0000000000..925f68e3d1 --- /dev/null +++ 
b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py @@ -0,0 +1,150 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=210, val_interval=10) + +# optimizer +custom_imports = dict( + imports=['mmpose.engine.optim_wrappers.layer_decay_optim_wrapper'], + allow_failed_imports=False) + +optim_wrapper = dict( + optimizer=dict( + type='AdamW', lr=5e-4, betas=(0.9, 0.999), weight_decay=0.1), + paramwise_cfg=dict( + num_layers=32, + layer_decay_rate=0.85, + custom_keys={ + 'bias': dict(decay_multi=0.0), + 'pos_embed': dict(decay_mult=0.0), + 'relative_position_bias_table': dict(decay_mult=0.0), + 'norm': dict(decay_mult=0.0), + }, + ), + constructor='LayerDecayOptimWrapperConstructor', + clip_grad=dict(max_norm=1., norm_type=2), +) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +# codec settings +codec = dict( + type='UDPHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='mmcls.VisionTransformer', + arch='huge', + img_size=(256, 192), + patch_size=16, + qkv_bias=True, + drop_path_rate=0.55, + with_cls_token=False, + output_cls_token=False, + patch_cfg=dict(padding=2), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'v1/pretrained_models/mae_pretrain_vit_huge.pth'), + ), + head=dict( 
+ type='HeatmapHead', + in_channels=1280, + out_channels=17, + deconv_out_channels=(256, 256), + deconv_kernel_sizes=(4, 4), + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=False, + )) + +# base dataset settings +data_root = 'data/' +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') 
+test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py new file mode 100644 index 0000000000..7ea9dbf395 --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py @@ -0,0 +1,150 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=210, val_interval=10) + +# optimizer +custom_imports = dict( + imports=['mmpose.engine.optim_wrappers.layer_decay_optim_wrapper'], + allow_failed_imports=False) + +optim_wrapper = dict( + optimizer=dict( + type='AdamW', lr=5e-4, betas=(0.9, 0.999), weight_decay=0.1), + paramwise_cfg=dict( + num_layers=24, + layer_decay_rate=0.8, + custom_keys={ + 'bias': dict(decay_multi=0.0), + 'pos_embed': dict(decay_mult=0.0), + 'relative_position_bias_table': dict(decay_mult=0.0), + 'norm': dict(decay_mult=0.0), + }, + ), + constructor='LayerDecayOptimWrapperConstructor', + clip_grad=dict(max_norm=1., norm_type=2), +) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +# codec settings +codec = dict( + type='UDPHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='mmcls.VisionTransformer', + arch='large', + 
img_size=(256, 192), + patch_size=16, + qkv_bias=True, + drop_path_rate=0.5, + with_cls_token=False, + output_cls_token=False, + patch_cfg=dict(padding=2), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'v1/pretrained_models/mae_pretrain_vit_large.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=1024, + out_channels=17, + deconv_out_channels=(256, 256), + deconv_kernel_sizes=(4, 4), + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=False, + )) + +# base dataset settings +data_root = 'data/' +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size'], use_udp=True), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=4, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=4, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + 
bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py new file mode 100644 index 0000000000..bf9fa25beb --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w32_8xb64-210e_humanart-256x192.py @@ -0,0 +1,150 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=210, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='Adam', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='coco/AP', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + 
num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=32, + out_channels=17, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + 
ann_file='HumanArt/annotations/validation_humanart.json', + bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py new file mode 100644 index 0000000000..6a5ae0707c --- /dev/null +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_hrnet-w48_8xb32-210e_humanart-256x192.py @@ -0,0 +1,150 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +# runtime +train_cfg = dict(max_epochs=210, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict( + type='Adam', + lr=5e-4, +)) + +# learning policy +param_scheduler = [ + dict( + type='LinearLR', begin=0, end=500, start_factor=0.001, + by_epoch=False), # warm-up + dict( + type='MultiStepLR', + begin=0, + end=210, + milestones=[170, 200], + gamma=0.1, + by_epoch=True) +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict(checkpoint=dict(save_best='coco/AP', rule='greater')) + +# codec settings +codec = dict( + type='MSRAHeatmap', input_size=(192, 256), heatmap_size=(48, 64), sigma=2) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( 
+ num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(48, 96)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(48, 96, 192)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(48, 96, 192, 384))), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w48-8ef0771d.pth'), + ), + head=dict( + type='HeatmapHead', + in_channels=48, + out_channels=17, + deconv_out_channels=None, + loss=dict(type='KeypointMSELoss', use_target_weight=True), + decoder=codec), + test_cfg=dict( + flip_test=True, + flip_mode='heatmap', + shift_heatmap=True, + )) + +# base dataset settings +dataset_type = 'HumanArtDataset' +data_mode = 'topdown' +data_root = 'data/' + +# pipelines +train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='HumanArt/annotations/training_humanart_coco.json', + data_prefix=dict(img=''), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + 
data_mode=data_mode, + ann_file='HumanArt/annotations/validation_humanart.json', + bbox_file=f'{data_root}HumanArt/person_detection_results/' + 'HumanArt_validation_detections_AP_H_56_person.json', + data_prefix=dict(img=''), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'HumanArt/annotations/validation_humanart.json') +test_evaluator = val_evaluator diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md index 962dd326f3..a4d2dd6c50 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.md @@ -61,6 +61,10 @@ Results on Human-Art validation dataset with detector having human AP of 56.2 on | [ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.381 | 0.532 | 0.405 | 0.448 | 0.602 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | | [ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.270 | 0.423 | 0.272 | 0.340 | 0.510 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | | 
[ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.410 | 0.549 | 0.434 | 0.475 | 0.615 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) |
+| [ViTPose-L-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py) | 256x192 | 0.342 | 0.498 | 0.357 | 0.413 | 0.577 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.json) |
+| [ViTPose-L-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py) | 256x192 | 0.459 | 0.592 | 0.487 | 0.525 | 0.656 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.json) |
+| [ViTPose-H-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py) | 256x192 | 0.377 | 0.541 | 0.391 | 0.447 | 0.615 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.json) |
+| 
[ViTPose-H-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py) | 256x192 | 0.468 | 0.594 | 0.498 | 0.534 | 0.655 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.json) | Results on Human-Art validation dataset with ground-truth bounding-box @@ -72,6 +76,10 @@ Results on Human-Art validation dataset with ground-truth bounding-box | [ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.738 | 0.905 | 0.802 | 0.768 | 0.911 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | | [ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.555 | 0.782 | 0.590 | 0.599 | 0.809 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | | [ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.759 | 0.905 | 0.823 | 0.790 | 0.917 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) |
+| [ViTPose-L-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py) | 256x192 | 0.637 | 0.838 | 0.689 | 0.677 | 0.859 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.json) |
+| [ViTPose-L-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py) | 256x192 | 0.789 | 0.916 | 0.845 | 0.819 | 0.929 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.json) |
+| [ViTPose-H-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py) | 256x192 | 0.665 | 0.860 | 0.715 | 0.701 | 0.871 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.json) |
+| [ViTPose-H-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py) | 256x192 | 0.800 | 0.926 | 0.855 | 0.828 | 0.933 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.pth) | 
[log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.json) | Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 dataset @@ -83,3 +91,7 @@ Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 da | [ViTPose-S-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-small_8xb64-210e_humanart-256x192.py) | 256x192 | 0.737 | 0.902 | 0.811 | 0.792 | 0.942 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-small_8xb64-210e_humanart-256x192-5cbe2bfc_20230611.json) | | [ViTPose-B-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192.py) | 256x192 | 0.757 | 0.905 | 0.829 | 0.810 | 0.946 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base_8xb64-210e_coco-256x192-216eae50_20230314.json) | | [ViTPose-B-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-base_8xb64-210e_humanart-256x192.py) | 256x192 | 0.758 | 0.906 | 0.829 | 0.812 | 0.946 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.json) | +| [ViTPose-L-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192.py) | 256x192 | 0.782 | 0.914 | 0.850 | 0.834 | 
0.952 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large_8xb64-210e_coco-256x192-53609f55_20230314.json) |
+| [ViTPose-L-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py) | 256x192 | 0.782 | 0.914 | 0.849 | 0.835 | 0.953 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.json) |
+| [ViTPose-H-coco](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py) | 256x192 | 0.788 | 0.917 | 0.855 | 0.839 | 0.954 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge_8xb64-210e_coco-256x192-e32adcd4_20230314.json) |
+| [ViTPose-H-humanart-coco](configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py) | 256x192 | 0.788 | 0.914 | 0.853 | 0.841 | 0.956 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.json) |
diff --git a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml
index 12a557fbf6..cbbe965c2d 
100644 --- a/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml +++ b/configs/body_2d_keypoint/topdown_heatmap/humanart/vitpose_humanart.yml @@ -77,3 +77,69 @@ Models: AR@0.5: 0.917 Task: Body 2D Keypoint Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-base_8xb64-210e_humanart-256x192-b417f546_20230611.pth +- Config: configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-large_8xb64-210e_humanart-256x192.py + In Collection: ViTPose + Metadata: + Architecture: *id001 + Model Size: Large + Training Data: *id002 + Name: td-hm_ViTPose-large_8xb64-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.782 + AP@0.5: 0.914 + AP@0.75: 0.849 + AR: 0.835 + AR@0.5: 0.953 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.459 + AP@0.5: 0.592 + AP@0.75: 0.487 + AR: 0.525 + AR@0.5: 0.656 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.789 + AP@0.5: 0.916 + AP@0.75: 0.845 + AR: 0.819 + AR@0.5: 0.929 + Task: Body 2D Keypoint + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-large_8xb64-210e_humanart-256x192-9aba9345_20230614.pth +- Config: configs/body_2d_keypoint/topdown_heatmap/humanart/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192.py + In Collection: ViTPose + Metadata: + Architecture: *id001 + Model Size: Huge + Training Data: *id002 + Name: td-hm_ViTPose-huge_8xb64-210e_humanart-256x192 + Results: + - Dataset: COCO + Metrics: + AP: 0.788 + AP@0.5: 0.914 + AP@0.75: 0.853 + AR: 0.841 + AR@0.5: 0.956 + Task: Body 2D Keypoint + - Dataset: Human-Art + Metrics: + AP: 0.468 + AP@0.5: 0.594 + AP@0.75: 0.498 + AR: 0.534 + AR@0.5: 0.655 + Task: Body 2D Keypoint + - Dataset: Human-Art(GT) + Metrics: + AP: 0.800 + AP@0.5: 0.926 + AP@0.75: 0.855 + AR: 0.828 + AR@0.5: 0.933 + Task: Body 2D Keypoint + Weights: 
https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/human_art/td-hm_ViTPose-huge_8xb64-210e_humanart-256x192-603bb573_20230612.pth From 1ab662a4eac404adbce8afd4eb0a89e4f771524b Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Thu, 29 Jun 2023 12:29:33 +0800 Subject: [PATCH 40/52] [Docs] Enhance inferencer docs (#2495) --- docs/en/user_guides/inference.md | 11 +++++++++++ docs/zh_cn/user_guides/inference.md | 11 +++++++++++ 2 files changed, 22 insertions(+) diff --git a/docs/en/user_guides/inference.md b/docs/en/user_guides/inference.md index 228ba0f5e1..055f283293 100644 --- a/docs/en/user_guides/inference.md +++ b/docs/en/user_guides/inference.md @@ -91,6 +91,17 @@ The inferencer is capable of processing a range of input types, which includes t - A list of image arrays (NA for CLI tool) - A webcam (in which case the `input` parameter should be set to either `'webcam'` or `'webcam:{CAMERA_ID}'`) +Please note that when the input corresponds to multiple images, such as when the input is a video or a folder path, the inference process needs to iterate over the results generator in order to perform inference on all the frames or images within the folder. Here's an example in Python: + +```python +folder_path = 'tests/data/coco' + +result_generator = inferencer(folder_path, show=True) +results = [result for result in result_generator] +``` + +In this example, the `inferencer` takes the `folder_path` as input and returns a generator object (`result_generator`) that produces inference results. By iterating over the `result_generator` and storing each result in the `results` list, you can obtain the inference results for all the frames or images within the folder. 
+ ### Custom Pose Estimation Models The inferencer provides several methods that can be used to customize the models employed: diff --git a/docs/zh_cn/user_guides/inference.md b/docs/zh_cn/user_guides/inference.md index a15a52841b..0844bc611f 100644 --- a/docs/zh_cn/user_guides/inference.md +++ b/docs/zh_cn/user_guides/inference.md @@ -89,6 +89,17 @@ python demo/inferencer_demo.py 'tests/data/coco/000000000785.jpg' \ - 摄像头(在这种情况下,输入参数应该设置为`webcam`或`webcam:{CAMERA_ID}`) +当输入对应于多个图像时,例如输入为**视频**或**文件夹**路径时,推理生成器必须被遍历,以便推理器对视频/文件夹中的所有帧/图像进行推理。以下是一个示例: + +```python +folder_path = 'tests/data/coco' + +result_generator = inferencer(folder_path, show=True) +results = [result for result in result_generator] +``` + +在这个示例中,`inferencer` 接受 `folder_path` 作为输入,并返回一个生成器对象(`result_generator`),用于生成推理结果。通过遍历 `result_generator` 并将每个结果存储在 `results` 列表中,您可以获得视频/文件夹中所有帧/图像的推理结果。 + ### 自定义姿态估计模型 `MMPoseInferencer`提供了几种可用于自定义所使用的模型的方法: From fe4dec72f70f16c4a34ec63a689f49360eec882b Mon Sep 17 00:00:00 2001 From: Marek Subocz Date: Thu, 29 Jun 2023 13:00:03 +0200 Subject: [PATCH 41/52] [Fix] Fix bug in mmaction2 pipeline --- mmpose/apis/inference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmpose/apis/inference.py b/mmpose/apis/inference.py index 3f674b3677..772ef17b7c 100644 --- a/mmpose/apis/inference.py +++ b/mmpose/apis/inference.py @@ -156,7 +156,7 @@ def inference_topdown(model: nn.Module, init_default_scope(scope) pipeline = Compose(model.cfg.test_dataloader.dataset.pipeline) - if bboxes is None: + if bboxes is None or len(bboxes) == 0: # get bbox from the image size if isinstance(img, str): w, h = Image.open(img).size From db345463f5fe50f3deacfbca6bb4ddbcb7023913 Mon Sep 17 00:00:00 2001 From: Tau Date: Thu, 29 Jun 2023 19:16:41 +0800 Subject: [PATCH 42/52] [Feature] Support mim download odl datasets (#2436) --- MANIFEST.in | 1 + dataset-index.yml | 71 +++ docs/en/advanced_guides/customize_datasets.md | 2 +- docs/en/user_guides/prepare_datasets.md | 66 ++- 
.../advanced_guides/customize_datasets.md | 265 +++++++++- docs/zh_cn/user_guides/prepare_datasets.md | 455 ++++++++---------- setup.py | 4 +- .../scripts/preprocess_300w.sh | 8 + .../scripts/preprocess_aic.sh | 7 + .../scripts/preprocess_ap10k.sh | 8 + .../scripts/preprocess_coco2017.sh | 9 + .../scripts/preprocess_crowdpose.sh | 7 + .../scripts/preprocess_freihand.sh | 7 + .../scripts/preprocess_hagrid.sh | 8 + .../scripts/preprocess_halpe.sh | 8 + .../scripts/preprocess_lapa.sh | 7 + .../scripts/preprocess_mpii.sh | 7 + .../scripts/preprocess_onehand10k.sh | 8 + .../scripts/preprocess_wflw.sh | 8 + 19 files changed, 702 insertions(+), 254 deletions(-) create mode 100644 dataset-index.yml create mode 100644 tools/dataset_converters/scripts/preprocess_300w.sh create mode 100644 tools/dataset_converters/scripts/preprocess_aic.sh create mode 100644 tools/dataset_converters/scripts/preprocess_ap10k.sh create mode 100644 tools/dataset_converters/scripts/preprocess_coco2017.sh create mode 100644 tools/dataset_converters/scripts/preprocess_crowdpose.sh create mode 100644 tools/dataset_converters/scripts/preprocess_freihand.sh create mode 100644 tools/dataset_converters/scripts/preprocess_hagrid.sh create mode 100644 tools/dataset_converters/scripts/preprocess_halpe.sh create mode 100644 tools/dataset_converters/scripts/preprocess_lapa.sh create mode 100644 tools/dataset_converters/scripts/preprocess_mpii.sh create mode 100644 tools/dataset_converters/scripts/preprocess_onehand10k.sh create mode 100644 tools/dataset_converters/scripts/preprocess_wflw.sh diff --git a/MANIFEST.in b/MANIFEST.in index 8a93c252bd..c6d3090b1c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,6 @@ include requirements/*.txt include mmpose/.mim/model-index.yml +include mmpose/.mim/dataset-index.yml recursive-include mmpose/.mim/configs *.py *.yml recursive-include mmpose/.mim/tools *.py *.sh recursive-include mmpose/.mim/demo *.py diff --git a/dataset-index.yml b/dataset-index.yml new file 
mode 100644 index 0000000000..a6acc57cc4 --- /dev/null +++ b/dataset-index.yml @@ -0,0 +1,71 @@ +coco2017: + dataset: COCO_2017 + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_coco2017.sh + +mpii: + dataset: MPII_Human_Pose + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_mpii.sh + +aic: + dataset: AI_Challenger + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_aic.sh + +crowdpose: + dataset: CrowdPose + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_crowdpose.sh + +halpe: + dataset: Halpe + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_halpe.sh + +lapa: + dataset: LaPa + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_lapa.sh + +300w: + dataset: 300w + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_300w.sh + +wflw: + dataset: WFLW + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_wflw.sh + +onehand10k: + dataset: OneHand10K + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_onehand10k.sh + +freihand: + dataset: FreiHAND + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_freihand.sh + +ap10k: + dataset: AP-10K + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_ap10k.sh + +hagrid: + dataset: HaGRID + download_root: data + data_root: data/pose + script: tools/dataset_converters/scripts/preprocess_hagrid.sh diff --git a/docs/en/advanced_guides/customize_datasets.md b/docs/en/advanced_guides/customize_datasets.md index 6ef43e5b0d..1aac418812 100644 --- a/docs/en/advanced_guides/customize_datasets.md +++ 
b/docs/en/advanced_guides/customize_datasets.md @@ -256,4 +256,4 @@ combined_dataset = dict( - **Converter transforms of sub-datasets** are applied when there exist mismatches of annotation format between sub-datasets and the combined dataset. For example, the number and order of keypoints might be different in the combined dataset and the sub-datasets. Then `KeypointConverter` can be used to unify the keypoints number and order. -- More details about `CombinedDataset` and `KeypointConverter` can be found in Advanced Guides-[Training with Mixed Datasets](../advanced_guides/mixed_datasets.md). +- More details about `CombinedDataset` and `KeypointConverter` can be found in Advanced Guides-[Training with Mixed Datasets](../user_guides/mixed_datasets.md). diff --git a/docs/en/user_guides/prepare_datasets.md b/docs/en/user_guides/prepare_datasets.md index b754767845..2f8ddcbc32 100644 --- a/docs/en/user_guides/prepare_datasets.md +++ b/docs/en/user_guides/prepare_datasets.md @@ -1,6 +1,6 @@ # Prepare Datasets -In this document, we will give a guide on the process of preparing datasets for the MMPose. Various aspects of dataset preparation will be discussed, including using built-in datasets, creating custom datasets, combining datasets for training, and browsing the dataset. +In this document, we will give a guide on the process of preparing datasets for the MMPose. Various aspects of dataset preparation will be discussed, including using built-in datasets, creating custom datasets, combining datasets for training, browsing and downloading the datasets. ## Use built-in datasets @@ -155,3 +155,67 @@ Here is a processed sample ![transformed_coco](https://user-images.githubusercontent.com/26127467/187386652-bd47335d-797c-4e8c-b823-2a4915f9812f.jpg) The heatmap target will be visualized together if it is generated in the pipeline. + +## Download dataset via MIM + +By using [OpenDataLab](https://opendatalab.com/), you can obtain free formatted datasets in various fields. 
Through the platform's search function, you can quickly and easily find the dataset you are looking for. Using the formatted datasets from the platform, you can efficiently conduct tasks across datasets.
+
+If you use MIM to download, make sure that the version is greater than v0.3.8. You can use the following commands to update, install, log in and download the dataset:
+
+```shell
+# upgrade your MIM
+pip install -U openmim
+
+# install OpenDataLab CLI tools
+pip install -U opendatalab
+# log in to OpenDataLab (registration required)
+odl login
+
+# download coco2017 and preprocess by MIM
+mim download mmpose --dataset coco2017
+```
+
+### Supported datasets
+
+Here is the list of supported datasets; we will continue to update it in the future.
+
+#### Body
+
+| Dataset name  | Download command                          |
+| ------------- | ----------------------------------------- |
+| COCO 2017     | `mim download mmpose --dataset coco2017`  |
+| MPII          | `mim download mmpose --dataset mpii`      |
+| AI Challenger | `mim download mmpose --dataset aic`       |
+| CrowdPose     | `mim download mmpose --dataset crowdpose` |
+
+#### Face
+
+| Dataset name | Download command                     |
+| ------------ | ------------------------------------ |
+| LaPa         | `mim download mmpose --dataset lapa` |
+| 300W         | `mim download mmpose --dataset 300w` |
+| WFLW         | `mim download mmpose --dataset wflw` |
+
+#### Hand
+
+| Dataset name | Download command                           |
+| ------------ | ------------------------------------------ |
+| OneHand10K   | `mim download mmpose --dataset onehand10k` |
+| FreiHAND     | `mim download mmpose --dataset freihand`   |
+| HaGRID       | `mim download mmpose --dataset hagrid`     |
+
+#### Whole Body
+
+| Dataset name | Download command                      |
+| ------------ | ------------------------------------- |
+| Halpe        | `mim download mmpose --dataset halpe` |
+
+#### Animal
+
+| Dataset name | Download command                      |
+| ------------ | ------------------------------------- |
+| AP-10K       | `mim download mmpose --dataset ap10k` |
+
+#### Fashion
+
+Coming Soon
diff --git 
a/docs/zh_cn/advanced_guides/customize_datasets.md b/docs/zh_cn/advanced_guides/customize_datasets.md index 1829c37a0c..61b58dc929 100644 --- a/docs/zh_cn/advanced_guides/customize_datasets.md +++ b/docs/zh_cn/advanced_guides/customize_datasets.md @@ -1,3 +1,264 @@ -# Customize Datasets +# 自定义数据集 -Coming soon. +MMPose 目前已支持了多个任务和相应的数据集。您可以在 [数据集](https://mmpose.readthedocs.io/zh_CN/latest/dataset_zoo.html) 找到它们。请按照相应的指南准备数据。 + + + +- [自定义数据集-将数据组织为 COCO 格式](#自定义数据集-将数据组织为-coco-格式) +- [创建自定义数据集的元信息文件](#创建自定义数据集的元信息文件) +- [创建自定义数据集类](#创建自定义数据集类) +- [创建自定义配置文件](#创建自定义配置文件) +- [数据集封装](#数据集封装) + + + +## 将数据组织为 COCO 格式 + +最简单的使用自定义数据集的方法是将您的注释格式转换为 COCO 数据集格式。 + +COCO 格式的注释 JSON 文件具有以下必要键: + +```python +'images': [ + { + 'file_name': '000000001268.jpg', + 'height': 427, + 'width': 640, + 'id': 1268 + }, + ... +], +'annotations': [ + { + 'segmentation': [[426.36, + ... + 424.34, + 223.3]], + 'keypoints': [0,0,0, + 0,0,0, + 0,0,0, + 427,220,2, + 443,222,2, + 414,228,2, + 449,232,2, + 408,248,1, + 454,261,2, + 0,0,0, + 0,0,0, + 411,287,2, + 431,287,2, + 0,0,0, + 458,265,2, + 0,0,0, + 466,300,1], + 'num_keypoints': 10, + 'area': 3894.5826, + 'iscrowd': 0, + 'image_id': 1268, + 'bbox': [402.34, 205.02, 65.26, 88.45], + 'category_id': 1, + 'id': 215218 + }, + ... +], +'categories': [ + {'id': 1, 'name': 'person'}, + ] +``` + +JSON 标注文件中有三个关键词是必需的: + +- `images`:包含所有图像信息的列表,每个图像都有一个 `file_name`、`height`、`width` 和 `id` 键。 +- `annotations`:包含所有实例标注信息的列表,每个实例都有一个 `segmentation`、`keypoints`、`num_keypoints`、`area`、`iscrowd`、`image_id`、`bbox`、`category_id` 和 `id` 键。 +- `categories`:包含所有类别信息的列表,每个类别都有一个 `id` 和 `name` 键。以人体姿态估计为例,`id` 为 1,`name` 为 `person`。 + +如果您的数据集已经是 COCO 格式的,那么您可以直接使用 `CocoDataset` 类来读取该数据集。 + +## 创建自定义数据集的元信息文件 + +对于一个新的数据集而言,您需要创建一个新的数据集元信息文件。该文件包含了数据集的基本信息,如关键点个数、排列顺序、可视化颜色、骨架连接关系等。元信息文件通常存放在 `config/_base_/datasets/` 目录下,例如: + +``` +config/_base_/datasets/custom.py +``` + +元信息文件中需要包含以下信息: + +- `keypoint_info`:每个关键点的信息: + 1. 
`name`: 关键点名称,必须是唯一的,例如 `nose`、`left_eye` 等。 + 2. `id`: 关键点 ID,必须是唯一的,从 0 开始。 + 3. `color`: 关键点可视化时的颜色,以 (\[B, G, R\]) 格式组织起来,用于可视化。 + 4. `type`: 关键点类型,可以是 `upper`、`lower` 或 \`\`,用于数据增强。 + 5. `swap`: 关键点交换关系,用于水平翻转数据增强。 +- `skeleton_info`:骨架连接关系,用于可视化。 +- `joint_weights`:每个关键点的权重,用于损失函数计算。 +- `sigma`:标准差,用于计算 OKS 分数,详细信息请参考 [keypoints-eval](https://cocodataset.org/#keypoints-eval)。 + +下面是一个简化版本的元信息文件([完整版](/configs/_base_/datasets/coco.py)): + +```python +dataset_info = dict( + dataset_name='coco', + paper_info=dict( + author='Lin, Tsung-Yi and Maire, Michael and ' + 'Belongie, Serge and Hays, James and ' + 'Perona, Pietro and Ramanan, Deva and ' + r'Doll{\'a}r, Piotr and Zitnick, C Lawrence', + title='Microsoft coco: Common objects in context', + container='European conference on computer vision', + year='2014', + homepage='http://cocodataset.org/', + ), + keypoint_info={ + 0: + dict(name='nose', id=0, color=[51, 153, 255], type='upper', swap=''), + 1: + dict( + name='left_eye', + id=1, + color=[51, 153, 255], + type='upper', + swap='right_eye'), + ... + 16: + dict( + name='right_ankle', + id=16, + color=[255, 128, 0], + type='lower', + swap='left_ankle') + }, + skeleton_info={ + 0: + dict(link=('left_ankle', 'left_knee'), id=0, color=[0, 255, 0]), + ... + 18: + dict( + link=('right_ear', 'right_shoulder'), id=18, color=[51, 153, 255]) + }, + joint_weights=[ + 1., 1., 1., 1., 1., 1., 1., 1.2, 1.2, 1.5, 1.5, 1., 1., 1.2, 1.2, 1.5, + 1.5 + ], + sigmas=[ + 0.026, 0.025, 0.025, 0.035, 0.035, 0.079, 0.079, 0.072, 0.072, 0.062, + 0.062, 0.107, 0.107, 0.087, 0.087, 0.089, 0.089 + ]) +``` + +## 创建自定义数据集类 + +如果标注信息不是用 COCO 格式存储的,那么您需要创建一个新的数据集类。数据集类需要继承自 `BaseDataset` 类,并且需要按照以下步骤实现: + +1. 在 `mmpose/datasets/datasets` 目录下找到该数据集符合的 package,如果没有符合的,则创建一个新的 package。 + +2. 
在该 package 下创建一个新的数据集类,在对应的注册器中进行注册: + + ```python + from mmengine.dataset import BaseDataset + from mmpose.registry import DATASETS + + @DATASETS.register_module(name='MyCustomDataset') + class MyCustomDataset(BaseDataset): + ``` + + 如果未注册,你会在运行时遇到 `KeyError: 'XXXXX is not in the dataset registry'`。 + 关于 `mmengine.BaseDataset` 的更多信息,请参考 [这个文档](https://mmengine.readthedocs.io/en/latest/advanced_tutorials/basedataset.html)。 + +3. 确保你在 package 的 `__init__.py` 中导入了该数据集类。 + +4. 确保你在 `mmpose/datasets/__init__.py` 中导入了该 package。 + +## 创建自定义配置文件 + +在配置文件中,你需要修改跟数据集有关的部分,例如: + +```python +... +# 自定义数据集类 +dataset_type = 'MyCustomDataset' # or 'CocoDataset' + +train_dataloader = dict( + batch_size=2, + dataset=dict( + type=dataset_type, + data_root='root/of/your/train/data', + ann_file='path/to/your/train/json', + data_prefix=dict(img='path/to/your/train/img'), + metainfo=dict(from_file='configs/_base_/datasets/custom.py'), + ...), + ) + +val_dataloader = dict( + batch_size=2, + dataset=dict( + type=dataset_type, + data_root='root/of/your/val/data', + ann_file='path/to/your/val/json', + data_prefix=dict(img='path/to/your/val/img'), + metainfo=dict(from_file='configs/_base_/datasets/custom.py'), + ...), + ) + +test_dataloader = dict( + batch_size=2, + dataset=dict( + type=dataset_type, + data_root='root/of/your/test/data', + ann_file='path/to/your/test/json', + data_prefix=dict(img='path/to/your/test/img'), + metainfo=dict(from_file='configs/_base_/datasets/custom.py'), + ...), + ) +... 
+``` + +请确保所有的路径都是正确的。 + +## 数据集封装 + +目前 [MMEngine](https://github.com/open-mmlab/mmengine) 支持以下数据集封装: + +- [ConcatDataset](https://mmengine.readthedocs.io/zh_CN/latest/advanced_tutorials/basedataset.html#concatdataset) +- [RepeatDataset](https://mmengine.readthedocs.io/zh_CN/latest/advanced_tutorials/basedataset.html#repeatdataset) + +### CombinedDataset + +MMPose 提供了一个 `CombinedDataset` 类,它可以将多个数据集封装成一个数据集。它的使用方法如下: + +```python +dataset_1 = dict( + type='dataset_type_1', + data_root='root/of/your/dataset1', + data_prefix=dict(img_path='path/to/your/img'), + ann_file='annotations/train.json', + pipeline=[ + # 使用转换器将标注信息统一为需要的格式 + converter_transform_1 + ]) + +dataset_2 = dict( + type='dataset_type_2', + data_root='root/of/your/dataset2', + data_prefix=dict(img_path='path/to/your/img'), + ann_file='annotations/train.json', + pipeline=[ + converter_transform_2 + ]) + +shared_pipeline = [ + LoadImage(), + ParseImage(), +] + +combined_dataset = dict( + type='CombinedDataset', + metainfo=dict(from_file='path/to/your/metainfo'), + datasets=[dataset_1, dataset_2], + pipeline=shared_pipeline, +) +``` + +- **合并数据集的元信息** 决定了标注格式,可以是子数据集的元信息,也可以是自定义的元信息。如果要自定义元信息,可以参考 [创建自定义数据集的元信息文件](#创建自定义数据集的元信息文件)。 +- **KeypointConverter** 用于将不同的标注格式转换成统一的格式。比如将关键点个数不同、关键点排列顺序不同的数据集进行合并。 +- 更详细的说明请前往[混合数据集训练](../user_guides/mixed_datasets.md)。 diff --git a/docs/zh_cn/user_guides/prepare_datasets.md b/docs/zh_cn/user_guides/prepare_datasets.md index a10a7e4836..8b7d651e88 100644 --- a/docs/zh_cn/user_guides/prepare_datasets.md +++ b/docs/zh_cn/user_guides/prepare_datasets.md @@ -1,264 +1,221 @@ # 准备数据集 -MMPose 目前已支持了多个任务和相应的数据集。您可以在 [数据集](https://mmpose.readthedocs.io/zh_CN/latest/dataset_zoo.html) 找到它们。请按照相应的指南准备数据。 - - - -- [自定义数据集-将数据组织为 COCO 格式](#自定义数据集-将数据组织为-coco-格式) -- [创建自定义数据集的元信息文件](#创建自定义数据集的元信息文件) -- [创建自定义数据集类](#创建自定义数据集类) -- [创建自定义配置文件](#创建自定义配置文件) -- [数据集封装](#数据集封装) - - - -## 自定义数据集-将数据组织为 COCO 格式 - -最简单的使用自定义数据集的方法是将您的注释格式转换为 COCO 数据集格式。 - -COCO 格式的注释 JSON 文件具有以下必要键: - 
-```python -'images': [ - { - 'file_name': '000000001268.jpg', - 'height': 427, - 'width': 640, - 'id': 1268 - }, - ... -], -'annotations': [ - { - 'segmentation': [[426.36, - ... - 424.34, - 223.3]], - 'keypoints': [0,0,0, - 0,0,0, - 0,0,0, - 427,220,2, - 443,222,2, - 414,228,2, - 449,232,2, - 408,248,1, - 454,261,2, - 0,0,0, - 0,0,0, - 411,287,2, - 431,287,2, - 0,0,0, - 458,265,2, - 0,0,0, - 466,300,1], - 'num_keypoints': 10, - 'area': 3894.5826, - 'iscrowd': 0, - 'image_id': 1268, - 'bbox': [402.34, 205.02, 65.26, 88.45], - 'category_id': 1, - 'id': 215218 - }, - ... -], -'categories': [ - {'id': 1, 'name': 'person'}, - ] +在这份文档将指导如何为 MMPose 准备数据集,包括使用内置数据集、创建自定义数据集、结合数据集进行训练、浏览和下载数据集。 + +## 使用内置数据集 + +**步骤一**: 准备数据 + +MMPose 支持多种任务和相应的数据集。你可以在 [数据集仓库](https://mmpose.readthedocs.io/en/latest/dataset_zoo.html) 中找到它们。为了正确准备你的数据,请按照你选择的数据集的指南进行操作。 + +**步骤二**: 在配置文件中进行数据集设置 + +在开始训练或评估模型之前,你必须配置数据集设置。以 [`td-hm_hrnet-w32_8xb64-210e_coco-256x192.py`](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-210e_coco-256x192.py) 为例,它可以用于在 COCO 数据集上训练或评估 HRNet 姿态估计器。下面我们浏览一下数据集配置: + +- 基础数据集参数 + + ```python + # base dataset settings + dataset_type = 'CocoDataset' + data_mode = 'topdown' + data_root = 'data/coco/' + ``` + + - `dataset_type` 指定数据集的类名。用户可以参考 [数据集 API](https://mmpose.readthedocs.io/en/latest/api.html#datasets) 来找到他们想要的数据集的类名。 + - `data_mode` 决定了数据集的输出格式,有两个选项可用:`'topdown'` 和 `'bottomup'`。如果 `data_mode='topdown'`,数据元素表示一个实例及其姿态;否则,一个数据元素代表一张图像,包含多个实例和姿态。 + - `data_root` 指定数据集的根目录。 + +- 数据处理流程 + + ```python + # pipelines + train_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict(type='RandomBBoxTransform'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') + ] + val_pipeline = [ + dict(type='LoadImage'), + dict(type='GetBBoxCenterScale'), + 
dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') + ] + ``` + + `train_pipeline` 和 `val_pipeline` 分别定义了训练和评估阶段处理数据元素的步骤。除了加载图像和打包输入之外,`train_pipeline` 主要包含数据增强技术和目标生成器,而 `val_pipeline` 则专注于将数据元素转换为统一的格式。 + +- 数据加载器 + + ```python + # data loaders + train_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_train2017.json', + data_prefix=dict(img='train2017/'), + pipeline=train_pipeline, + )) + val_dataloader = dict( + batch_size=32, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_val2017.json', + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', + data_prefix=dict(img='val2017/'), + test_mode=True, + pipeline=val_pipeline, + )) + test_dataloader = val_dataloader + ``` + + 这个部分是配置数据集的关键。除了前面讨论过的基础数据集参数和数据处理流程之外,这里还定义了其他重要的参数。`batch_size` 决定了每个 GPU 的 batch size;`ann_file` 指定了数据集的注释文件;`data_prefix` 指定了图像文件夹。`bbox_file` 仅在 top-down 数据集的 val/test 数据加载器中使用,用于提供检测到的边界框信息。 + +我们推荐从使用相同数据集的配置文件中复制数据集配置,而不是从头开始编写,以最小化潜在的错误。通过这样做,用户可以根据需要进行必要的修改,从而确保更可靠和高效的设置过程。 + +## 使用自定义数据集 + +[自定义数据集](../advanced_guides/customize_datasets.md) 指南提供了如何构建自定义数据集的详细信息。在本节中,我们将强调一些使用和配置自定义数据集的关键技巧。 + +- 确定数据集类名。如果你将数据集重组为 COCO 格式,你可以简单地使用 `CocoDataset` 作为 `dataset_type` 的值。否则,你将需要使用你添加的自定义数据集类的名称。 + +- 指定元信息配置文件。MMPose 1.x 采用了与 MMPose 0.x 不同的策略来指定元信息。在 MMPose 1.x 中,用户可以按照以下方式指定元信息配置文件: + + ```python + train_dataloader = dict( + ... 
+ dataset=dict( + type=dataset_type, + data_root='root/of/your/train/data', + ann_file='path/to/your/train/json', + data_prefix=dict(img='path/to/your/train/img'), + # specify dataset meta information + metainfo=dict(from_file='configs/_base_/datasets/custom.py'), + ...), + ) + ``` + + 注意,`metainfo` 参数必须在 val/test 数据加载器中指定。 + +## 使用混合数据集进行训练 + +MMPose 提供了一个方便且多功能的解决方案,用于训练混合数据集。请参考[混合数据集训练](./mixed_datasets.md)。 + +## 浏览数据集 + +`tools/analysis_tools/browse_dataset.py` 帮助用户可视化地浏览姿态数据集,或将图像保存到指定的目录。 + +```shell +python tools/misc/browse_dataset.py ${CONFIG} [-h] [--output-dir ${OUTPUT_DIR}] [--not-show] [--phase ${PHASE}] [--mode ${MODE}] [--show-interval ${SHOW_INTERVAL}] ``` -JSON 标注文件中有三个关键词是必需的: +| ARGS | Description | +| -------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `CONFIG` | 配置文件的路径 | +| `--output-dir OUTPUT_DIR` | 保存可视化结果的目标文件夹。如果不指定,可视化的结果将不会被保存 | +| `--not-show` | 不适用外部窗口显示可视化的结果 | +| `--phase {train, val, test}` | 数据集选项 | +| `--mode {original, transformed}` | 指定可视化图片类型。 `original` 为不使用数据增强的原始图片及标注可视化; `transformed` 为经过增强后的可视化 | +| `--show-interval SHOW_INTERVAL` | 显示图片的时间间隔 | -- `images`:包含所有图像信息的列表,每个图像都有一个 `file_name`、`height`、`width` 和 `id` 键。 -- `annotations`:包含所有实例标注信息的列表,每个实例都有一个 `segmentation`、`keypoints`、`num_keypoints`、`area`、`iscrowd`、`image_id`、`bbox`、`category_id` 和 `id` 键。 -- `categories`:包含所有类别信息的列表,每个类别都有一个 `id` 和 `name` 键。以人体姿态估计为例,`id` 为 1,`name` 为 `person`。 +例如,用户想要可视化 COCO 数据集中的图像和标注,可以使用: -如果您的数据集已经是 COCO 格式的,那么您可以直接使用 `CocoDataset` 类来读取该数据集。 +```shell +python tools/misc/browse_dataset.py configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-e210_coco-256x192.py --mode original +``` -## 创建自定义数据集的元信息文件 +检测框和关键点将被绘制在原始图像上。下面是一个例子: +![original_coco](https://user-images.githubusercontent.com/26127467/187383698-7e518f21-b4cc-4712-9e97-99ddd8f0e437.jpg) 
-对于一个新的数据集而言,您需要创建一个新的数据集元信息文件。该文件包含了数据集的基本信息,如关键点个数、排列顺序、可视化颜色、骨架连接关系等。元信息文件通常存放在 `config/_base_/datasets/` 目录下,例如: +原始图像在被输入模型之前需要被处理。为了可视化预处理后的图像和标注,用户需要将参数 `mode` 修改为 `transformed`。例如: -``` -config/_base_/datasets/custom.py +```shell +python tools/misc/browse_dataset.py configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_8xb64-e210_coco-256x192.py --mode transformed ``` -元信息文件中需要包含以下信息: - -- `keypoint_info`:每个关键点的信息: - 1. `name`: 关键点名称,必须是唯一的,例如 `nose`、`left_eye` 等。 - 2. `id`: 关键点 ID,必须是唯一的,从 0 开始。 - 3. `color`: 关键点可视化时的颜色,以 (\[B, G, R\]) 格式组织起来,用于可视化。 - 4. `type`: 关键点类型,可以是 `upper`、`lower` 或 \`\`,用于数据增强。 - 5. `swap`: 关键点交换关系,用于水平翻转数据增强。 -- `skeleton_info`:骨架连接关系,用于可视化。 -- `joint_weights`:每个关键点的权重,用于损失函数计算。 -- `sigma`:标准差,用于计算 OKS 分数,详细信息请参考 [keypoints-eval](https://cocodataset.org/#keypoints-eval)。 - -下面是一个简化版本的元信息文件([完整版](/configs/_base_/datasets/coco.py)): - -```python -dataset_info = dict( - dataset_name='coco', - paper_info=dict( - author='Lin, Tsung-Yi and Maire, Michael and ' - 'Belongie, Serge and Hays, James and ' - 'Perona, Pietro and Ramanan, Deva and ' - r'Doll{\'a}r, Piotr and Zitnick, C Lawrence', - title='Microsoft coco: Common objects in context', - container='European conference on computer vision', - year='2014', - homepage='http://cocodataset.org/', - ), - keypoint_info={ - 0: - dict(name='nose', id=0, color=[51, 153, 255], type='upper', swap=''), - 1: - dict( - name='left_eye', - id=1, - color=[51, 153, 255], - type='upper', - swap='right_eye'), - ... - 16: - dict( - name='right_ankle', - id=16, - color=[255, 128, 0], - type='lower', - swap='left_ankle') - }, - skeleton_info={ - 0: - dict(link=('left_ankle', 'left_knee'), id=0, color=[0, 255, 0]), - ... 
- 18: - dict( - link=('right_ear', 'right_shoulder'), id=18, color=[51, 153, 255]) - }, - joint_weights=[ - 1., 1., 1., 1., 1., 1., 1., 1.2, 1.2, 1.5, 1.5, 1., 1., 1.2, 1.2, 1.5, - 1.5 - ], - sigmas=[ - 0.026, 0.025, 0.025, 0.035, 0.035, 0.079, 0.079, 0.072, 0.072, 0.062, - 0.062, 0.107, 0.107, 0.087, 0.087, 0.089, 0.089 - ]) -``` +这是一个处理后的样本: -## 创建自定义数据集类 - -如果标注信息不是用 COCO 格式存储的,那么您需要创建一个新的数据集类。数据集类需要继承自 `BaseDataset` 类,并且需要按照以下步骤实现: - -1. 在 `mmpose/datasets/datasets` 目录下找到该数据集符合的 package,如果没有符合的,则创建一个新的 package。 - -2. 在该 package 下创建一个新的数据集类,在对应的注册器中进行注册: - - ```python - from mmengine.dataset import BaseDataset - from mmpose.registry import DATASETS - - @DATASETS.register_module(name='MyCustomDataset') - class MyCustomDataset(BaseDataset): - ``` - - 如果未注册,你会在运行时遇到 `KeyError: 'XXXXX is not in the dataset registry'`。 - 关于 `mmengine.BaseDataset` 的更多信息,请参考 [这个文档](https://mmengine.readthedocs.io/en/latest/advanced_tutorials/basedataset.html)。 - -3. 确保你在 package 的 `__init__.py` 中导入了该数据集类。 - -4. 确保你在 `mmpose/datasets/__init__.py` 中导入了该 package。 - -## 创建自定义配置文件 - -在配置文件中,你需要修改跟数据集有关的部分,例如: - -```python -... 
-# 自定义数据集类 -dataset_type = 'MyCustomDataset' # or 'CocoDataset' - -train_dataloader = dict( - batch_size=2, - dataset=dict( - type=dataset_type, - data_root='root/of/your/train/data', - ann_file='path/to/your/train/json', - data_prefix=dict(img='path/to/your/train/img'), - metainfo=dict(from_file='configs/_base_/datasets/custom.py'), - ...), - ) - -val_dataloader = dict( - batch_size=2, - dataset=dict( - type=dataset_type, - data_root='root/of/your/val/data', - ann_file='path/to/your/val/json', - data_prefix=dict(img='path/to/your/val/img'), - metainfo=dict(from_file='configs/_base_/datasets/custom.py'), - ...), - ) - -test_dataloader = dict( - batch_size=2, - dataset=dict( - type=dataset_type, - data_root='root/of/your/test/data', - ann_file='path/to/your/test/json', - data_prefix=dict(img='path/to/your/test/img'), - metainfo=dict(from_file='configs/_base_/datasets/custom.py'), - ...), - ) -... -``` +![transformed_coco](https://user-images.githubusercontent.com/26127467/187386652-bd47335d-797c-4e8c-b823-2a4915f9812f.jpg) + +热图目标将与之一起可视化,如果它是在 pipeline 中生成的。 + +## 用 MIM 下载数据集 -请确保所有的路径都是正确的。 - -## 数据集封装 - -目前 [MMEngine](https://github.com/open-mmlab/mmengine) 支持以下数据集封装: - -- [ConcatDataset](https://mmengine.readthedocs.io/zh_CN/latest/advanced_tutorials/basedataset.html#concatdataset) -- [RepeatDataset](https://mmengine.readthedocs.io/zh_CN/latest/advanced_tutorials/basedataset.html#repeatdataset) - -### CombinedDataset - -MMPose 提供了一个 `CombinedDataset` 类,它可以将多个数据集封装成一个数据集。它的使用方法如下: - -```python -dataset_1 = dict( - type='dataset_type_1', - data_root='root/of/your/dataset1', - data_prefix=dict(img_path='path/to/your/img'), - ann_file='annotations/train.json', - pipeline=[ - # 使用转换器将标注信息统一为需要的格式 - converter_transform_1 - ]) - -dataset_2 = dict( - type='dataset_type_2', - data_root='root/of/your/dataset2', - data_prefix=dict(img_path='path/to/your/img'), - ann_file='annotations/train.json', - pipeline=[ - converter_transform_2 - ]) - -shared_pipeline = [ - 
LoadImage(), - ParseImage(), -] - -combined_dataset = dict( - type='CombinedDataset', - metainfo=dict(from_file='path/to/your/metainfo'), - datasets=[dataset_1, dataset_2], - pipeline=shared_pipeline, -) +通过使用 [OpenDataLab](https://opendatalab.com/),您可以直接下载开源数据集。通过平台的搜索功能,您可以快速轻松地找到他们正在寻找的数据集。使用平台上的格式化数据集,您可以高效地跨数据集执行任务。 + +如果您使用 MIM 下载,请确保版本大于 v0.3.8。您可以使用以下命令进行更新、安装、登录和数据集下载: + +```shell +# upgrade your MIM +pip install -U openmim + +# install OpenDataLab CLI tools +pip install -U opendatalab +# log in OpenDataLab, registry +odl login + +# download coco2017 and preprocess by MIM +mim download mmpose --dataset coco2017 ``` -- **合并数据集的元信息** 决定了标注格式,可以是子数据集的元信息,也可以是自定义的元信息。如果要自定义元信息,可以参考 [创建自定义数据集的元信息文件](#创建自定义数据集的元信息文件)。 -- **KeypointConverter** 用于将不同的标注格式转换成统一的格式。比如将关键点个数不同、关键点排列顺序不同的数据集进行合并。 -- 更详细的说明请前往进阶教程-[混合数据集训练](../advanced_guides/mixed_datasets.md)。 +### 已支持的数据集 + +下面是支持的数据集列表,更多数据集将在之后持续更新: + +#### 人体数据集 + +| Dataset name | Download command | +| ------------- | ----------------------------------------- | +| COCO 2017 | `mim download mmpose --dataset coco2017` | +| MPII | `mim download mmpose --dataset mpii` | +| AI Challenger | `mim download mmpose --dataset aic` | +| CrowdPose | `mim download mmpose --dataset crowdpose` | + +#### 人脸数据集 + +| Dataset name | Download command | +| ------------ | ------------------------------------ | +| LaPa | `mim download mmpose --dataset lapa` | +| 300W | `mim download mmpose --dataset 300w` | +| WFLW | `mim download mmpose --dataset wflw` | + +#### 手部数据集 + +| Dataset name | Download command | +| ------------ | ------------------------------------------ | +| OneHand10K | `mim download mmpose --dataset onehand10k` | +| FreiHand | `mim download mmpose --dataset freihand` | +| HaGRID | `mim download mmpose --dataset hagrid` | + +#### 全身数据集 + +| Dataset name | Download command | +| ------------ | ------------------------------------- | +| Halpe | `mim download mmpose --dataset halpe` | + +#### 动物数据集 + +| Dataset name | 
Download command | +| ------------ | ------------------------------------- | +| AP-10K | `mim download mmpose --dataset ap10k` | + +#### 服装数据集 + +Coming Soon diff --git a/setup.py b/setup.py index 7222188e2f..c26bea7866 100644 --- a/setup.py +++ b/setup.py @@ -128,7 +128,9 @@ def add_mim_extension(): else: return - filenames = ['tools', 'configs', 'demo', 'model-index.yml'] + filenames = [ + 'tools', 'configs', 'demo', 'model-index.yml', 'dataset-index.yml' + ] repo_path = osp.dirname(__file__) mim_path = osp.join(repo_path, 'mmpose', '.mim') os.makedirs(mim_path, exist_ok=True) diff --git a/tools/dataset_converters/scripts/preprocess_300w.sh b/tools/dataset_converters/scripts/preprocess_300w.sh new file mode 100644 index 0000000000..bf405b5cc7 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_300w.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/300w/raw/300w.tar.gz.00 -C $DOWNLOAD_DIR/ +tar -xvf $DOWNLOAD_DIR/300w/300w.tar.00 -C $DATA_ROOT/ +rm -rf $DOWNLOAD_DIR/300w diff --git a/tools/dataset_converters/scripts/preprocess_aic.sh b/tools/dataset_converters/scripts/preprocess_aic.sh new file mode 100644 index 0000000000..726a61ca26 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_aic.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/AI_Challenger/raw/AI_Challenger.tar.gz -C $DATA_ROOT +rm -rf $DOWNLOAD_DIR/AI_Challenger diff --git a/tools/dataset_converters/scripts/preprocess_ap10k.sh b/tools/dataset_converters/scripts/preprocess_ap10k.sh new file mode 100644 index 0000000000..a4c330157b --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_ap10k.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/AP-10K/raw/AP-10K.tar.gz.00 -C $DOWNLOAD_DIR/ +tar -xvf $DOWNLOAD_DIR/AP-10K/AP-10K.tar.00 -C $DATA_ROOT/ +rm -rf $DOWNLOAD_DIR/AP-10K diff --git 
a/tools/dataset_converters/scripts/preprocess_coco2017.sh b/tools/dataset_converters/scripts/preprocess_coco2017.sh new file mode 100644 index 0000000000..853975e26b --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_coco2017.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +unzip $DOWNLOAD_DIR/COCO_2017/raw/Images/val2017.zip -d $DATA_ROOT +unzip $DOWNLOAD_DIR/COCO_2017/raw/Images/train2017.zip -d $DATA_ROOT +unzip $DOWNLOAD_DIR/COCO_2017/raw/Annotations/annotations_trainval2017.zip -d $DATA_ROOT +rm -rf $DOWNLOAD_DIR/COCO_2017 diff --git a/tools/dataset_converters/scripts/preprocess_crowdpose.sh b/tools/dataset_converters/scripts/preprocess_crowdpose.sh new file mode 100644 index 0000000000..3215239585 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_crowdpose.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/CrowdPose/raw/CrowdPose.tar.gz -C $DATA_ROOT +rm -rf $DOWNLOAD_DIR/CrowdPose diff --git a/tools/dataset_converters/scripts/preprocess_freihand.sh b/tools/dataset_converters/scripts/preprocess_freihand.sh new file mode 100644 index 0000000000..b3567cb5d7 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_freihand.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/FreiHAND/raw/FreiHAND.tar.gz -C $DATA_ROOT +rm -rf $DOWNLOAD_DIR/FreiHAND diff --git a/tools/dataset_converters/scripts/preprocess_hagrid.sh b/tools/dataset_converters/scripts/preprocess_hagrid.sh new file mode 100644 index 0000000000..de2356541c --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_hagrid.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +cat $DOWNLOAD_DIR/HaGRID/raw/*.tar.gz.* | tar -xvz -C $DATA_ROOT/.. +tar -xvf $DATA_ROOT/HaGRID.tar -C $DATA_ROOT/.. 
+rm -rf $DOWNLOAD_DIR/HaGRID diff --git a/tools/dataset_converters/scripts/preprocess_halpe.sh b/tools/dataset_converters/scripts/preprocess_halpe.sh new file mode 100644 index 0000000000..103d6202f9 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_halpe.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/Halpe/raw/Halpe.tar.gz.00 -C $DOWNLOAD_DIR/ +tar -xvf $DOWNLOAD_DIR/Halpe/Halpe.tar.00 -C $DATA_ROOT/ +rm -rf $DOWNLOAD_DIR/Halpe diff --git a/tools/dataset_converters/scripts/preprocess_lapa.sh b/tools/dataset_converters/scripts/preprocess_lapa.sh new file mode 100644 index 0000000000..977442c1b8 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_lapa.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/LaPa/raw/LaPa.tar.gz -C $DATA_ROOT +rm -rf $DOWNLOAD_DIR/LaPa diff --git a/tools/dataset_converters/scripts/preprocess_mpii.sh b/tools/dataset_converters/scripts/preprocess_mpii.sh new file mode 100644 index 0000000000..287b431897 --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_mpii.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/MPII_Human_Pose/raw/MPII_Human_Pose.tar.gz -C $DATA_ROOT +rm -rf $DOWNLOAD_DIR/MPII_Human_Pose diff --git a/tools/dataset_converters/scripts/preprocess_onehand10k.sh b/tools/dataset_converters/scripts/preprocess_onehand10k.sh new file mode 100644 index 0000000000..47f6e8942c --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_onehand10k.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/OneHand10K/raw/OneHand10K.tar.gz.00 -C $DOWNLOAD_DIR/ +tar -xvf $DOWNLOAD_DIR/OneHand10K/OneHand10K.tar.00 -C $DATA_ROOT/ +rm -rf $DOWNLOAD_DIR/OneHand10K diff --git a/tools/dataset_converters/scripts/preprocess_wflw.sh b/tools/dataset_converters/scripts/preprocess_wflw.sh new file mode 100644 index 
0000000000..723d1d158e --- /dev/null +++ b/tools/dataset_converters/scripts/preprocess_wflw.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +DOWNLOAD_DIR=$1 +DATA_ROOT=$2 + +tar -zxvf $DOWNLOAD_DIR/WFLW/raw/WFLW.tar.gz.00 -C $DOWNLOAD_DIR/ +tar -xvf $DOWNLOAD_DIR/WFLW/WFLW.tar.00 -C $DATA_ROOT/ +rm -rf $DOWNLOAD_DIR/WFLW From e1a6874a2b377db9e7a98d29c691ec9f750434df Mon Sep 17 00:00:00 2001 From: Tau Date: Fri, 30 Jun 2023 10:19:24 +0800 Subject: [PATCH 43/52] [Feature] Update RTMPose-x wholebody and body models (#2498) --- projects/rtmpose/README.md | 2 + projects/rtmpose/README_CN.md | 2 + .../rtmpose-x_8xb256-700e_coco-384x288.py | 238 ++++++++++++++++++ ...ose-x_8xb32-270e_coco-wholebody-384x288.py | 233 +++++++++++++++++ 4 files changed, 475 insertions(+) create mode 100644 projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py create mode 100644 projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py diff --git a/projects/rtmpose/README.md b/projects/rtmpose/README.md index 744680ed84..dc5b0dbe23 100644 --- a/projects/rtmpose/README.md +++ b/projects/rtmpose/README.md @@ -196,6 +196,7 @@ Feel free to join our community group for more help: | [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | | [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.72 | 4.33 | 24.78 | 3.66 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | | [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 27.79 | 9.35 | - | 6.05 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | +| [RTMPose-x\*](./rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py) | 384x288 | 78.8 | - | - | 49.43 | 17.22 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7_700e-384x288-71d7b7e9_20230629.pth) |
    @@ -237,6 +238,7 @@ For more details, please refer to [GroupFisher Pruning for RTMPose](./rtmpose/pr | [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | | [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 61.1 | 70.0 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | | [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 64.8 | 73.0 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| [RTMPose-x](./rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 65.3 | 73.3 | 18.1 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-coco-wholebody_pt-body7_270e-384x288-401dfc90_20230629.pth) | ### Animal 2d (17 Keypoints) diff --git a/projects/rtmpose/README_CN.md b/projects/rtmpose/README_CN.md index 6e33d1a2ee..30bddf9ecd 100644 --- a/projects/rtmpose/README_CN.md +++ b/projects/rtmpose/README_CN.md @@ -187,6 +187,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-256x192.py) | 256x192 | 76.7 | 95.08 | 70.14 | 27.66 | 4.16 | 18.85 | 3.46 | 45.37 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-256x192-4dba18fc_20230504.pth) | | [RTMPose-m\*](./rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-384x288.py) | 384x288 | 76.6 | 94.64 | 70.38 | 13.72 | 4.33 | 24.78 | 3.66 | - | 
[Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-body7_pt-body7_420e-384x288-65e718c4_20230504.pth) | | [RTMPose-l\*](./rtmpose/body_2d_keypoint/rtmpose-l_8xb256-420e_coco-384x288.py) | 384x288 | 78.3 | 95.36 | 71.58 | 27.79 | 9.35 | - | 6.05 | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-body7_pt-body7_420e-384x288-3f5a1437_20230504.pth) | +| [RTMPose-x\*](./rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py) | 384x288 | 78.8 | - | - | 49.43 | 17.22 | - | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-body7_pt-body7_700e-384x288-71d7b7e9_20230629.pth) |
    @@ -228,6 +229,7 @@ RTMPose 是一个长期优化迭代的项目,致力于业务场景下的高性 | [RTMPose-m](./rtmpose/wholebody_2d_keypoint/rtmpose-m_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 58.2 | 67.4 | 2.22 | 13.50 | 4.00 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody_pt-aic-coco_270e-256x192-cd5e845c_20230123.pth) | | [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb64-270e_coco-wholebody-256x192.py) | 256x192 | 61.1 | 70.0 | 4.52 | 23.41 | 5.67 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-256x192-6f206314_20230124.pth) | | [RTMPose-l](./rtmpose/wholebody_2d_keypoint/rtmpose-l_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 64.8 | 73.0 | 10.07 | 44.58 | 7.68 | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-l_simcc-coco-wholebody_pt-aic-coco_270e-384x288-eaeb96c8_20230125.pth) | +| [RTMPose-x](./rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py) | 384x288 | 65.3 | 73.3 | 18.1 | - | - | [Model](https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-x_simcc-coco-wholebody_pt-body7_270e-384x288-401dfc90_20230629.pth) | ### 动物 2d 关键点 (17 Keypoints) diff --git a/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py new file mode 100644 index 0000000000..1441e07791 --- /dev/null +++ b/projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-x_8xb256-700e_coco-384x288.py @@ -0,0 +1,238 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 17 +input_size = (288, 384) + +# runtime +max_epochs = 700 +stage2_num_epochs = 20 +base_lr = 4e-3 +train_batch_size = 256 +val_batch_size = 64 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', 
lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=1024) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1.33, + widen_factor=1.28, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1280, + out_channels=num_keypoints, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True)) + +# base dataset 
settings +dataset_type = 'CocoDataset' +data_mode = 'topdown' +data_root = 'data/coco/' + +backend_args = dict(backend='local') +# backend_args = dict( +# backend='petrel', +# path_mapping=dict({ +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/', +# f'{data_root}': 's3://openmmlab/datasets/detection/coco/' +# })) + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PhotometricDistortion'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# 
data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_train2017.json', + data_prefix=dict(img='train2017/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/person_keypoints_val2017.json', + bbox_file=f'{data_root}person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', + data_prefix=dict(img='val2017/'), + test_mode=True, + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoMetric', + ann_file=data_root + 'annotations/person_keypoints_val2017.json') +test_evaluator = val_evaluator diff --git a/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py new file mode 100644 index 0000000000..429016e825 --- /dev/null +++ b/projects/rtmpose/rtmpose/wholebody_2d_keypoint/rtmpose-x_8xb32-270e_coco-wholebody-384x288.py @@ -0,0 +1,233 @@ +_base_ = ['mmpose::_base_/default_runtime.py'] + +# common setting +num_keypoints = 133 +input_size = (288, 384) + +# runtime +max_epochs = 270 
+stage2_num_epochs = 30 +base_lr = 4e-3 +train_batch_size = 32 +val_batch_size = 32 + +train_cfg = dict(max_epochs=max_epochs, val_interval=10) +randomness = dict(seed=21) + +# optimizer +optim_wrapper = dict( + type='OptimWrapper', + optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.05), + clip_grad=dict(max_norm=35, norm_type=2), + paramwise_cfg=dict( + norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True)) + +# learning rate +param_scheduler = [ + dict( + type='LinearLR', + start_factor=1.0e-5, + by_epoch=False, + begin=0, + end=1000), + dict( + type='CosineAnnealingLR', + eta_min=base_lr * 0.05, + begin=max_epochs // 2, + end=max_epochs, + T_max=max_epochs // 2, + by_epoch=True, + convert_to_iter_based=True), +] + +# automatically scaling LR based on the actual training batch size +auto_scale_lr = dict(base_batch_size=512) + +# codec settings +codec = dict( + type='SimCCLabel', + input_size=input_size, + sigma=(6., 6.93), + simcc_split_ratio=2.0, + normalize=False, + use_dark=False) + +# model settings +model = dict( + type='TopdownPoseEstimator', + data_preprocessor=dict( + type='PoseDataPreprocessor', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + bgr_to_rgb=True), + backbone=dict( + _scope_='mmdet', + type='CSPNeXt', + arch='P5', + expand_ratio=0.5, + deepen_factor=1.33, + widen_factor=1.25, + out_indices=(4, ), + channel_attention=True, + norm_cfg=dict(type='SyncBN'), + act_cfg=dict(type='SiLU'), + init_cfg=dict( + type='Pretrained', + prefix='backbone.', + checkpoint='https://download.openmmlab.com/mmpose/v1/projects/' + 'rtmposev1/cspnext-x_udp-body7_210e-384x288-d28b58e6_20230529.pth' # noqa + )), + head=dict( + type='RTMCCHead', + in_channels=1280, + out_channels=num_keypoints, + input_size=codec['input_size'], + in_featuremap_size=tuple([s // 32 for s in codec['input_size']]), + simcc_split_ratio=codec['simcc_split_ratio'], + final_layer_kernel_size=7, + gau_cfg=dict( + hidden_dims=256, + s=128, + expansion_factor=2, + 
dropout_rate=0., + drop_path=0., + act_fn='SiLU', + use_rel_bias=False, + pos_enc=False), + loss=dict( + type='KLDiscretLoss', + use_target_weight=True, + beta=10., + label_softmax=True), + decoder=codec), + test_cfg=dict(flip_test=True, )) + +# base dataset settings +dataset_type = 'CocoWholeBodyDataset' +data_mode = 'topdown' +data_root = 'data/coco/' + +backend_args = dict(backend='local') + +# pipelines +train_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', scale_factor=[0.5, 1.5], rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=1.0), + ]), + dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] +val_pipeline = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='PackPoseInputs') +] + +train_pipeline_stage2 = [ + dict(type='LoadImage', backend_args=backend_args), + dict(type='GetBBoxCenterScale'), + dict(type='RandomFlip', direction='horizontal'), + dict(type='RandomHalfBody'), + dict( + type='RandomBBoxTransform', + shift_factor=0., + scale_factor=[0.5, 1.5], + rotate_factor=90), + dict(type='TopdownAffine', input_size=codec['input_size']), + dict(type='mmdet.YOLOXHSVRandomAug'), + dict( + type='Albumentation', + transforms=[ + dict(type='Blur', p=0.1), + dict(type='MedianBlur', p=0.1), + dict( + type='CoarseDropout', + max_holes=1, + max_height=0.4, + max_width=0.4, + min_holes=1, + min_height=0.2, + min_width=0.2, + p=0.5), + ]), + 
dict(type='GenerateTarget', encoder=codec), + dict(type='PackPoseInputs') +] + +# data loaders +train_dataloader = dict( + batch_size=train_batch_size, + num_workers=10, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/coco_wholebody_train_v1.0.json', + data_prefix=dict(img='train2017/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=val_batch_size, + num_workers=10, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, round_up=False), + dataset=dict( + type=dataset_type, + data_root=data_root, + data_mode=data_mode, + ann_file='annotations/coco_wholebody_val_v1.0.json', + data_prefix=dict(img='val2017/'), + test_mode=True, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', + pipeline=val_pipeline, + )) +test_dataloader = val_dataloader + +# hooks +default_hooks = dict( + checkpoint=dict( + save_best='coco-wholebody/AP', rule='greater', max_keep_ckpts=1)) + +custom_hooks = [ + dict( + type='EMAHook', + ema_type='ExpMomentumEMA', + momentum=0.0002, + update_buffers=True, + priority=49), + dict( + type='mmdet.PipelineSwitchHook', + switch_epoch=max_epochs - stage2_num_epochs, + switch_pipeline=train_pipeline_stage2) +] + +# evaluators +val_evaluator = dict( + type='CocoWholeBodyMetric', + ann_file=data_root + 'annotations/coco_wholebody_val_v1.0.json') +test_evaluator = val_evaluator From c40a2d40ccd6f7166c79378c06d8dc8ea14acb1c Mon Sep 17 00:00:00 2001 From: Yifan Lareina WU Date: Fri, 30 Jun 2023 11:09:53 +0800 Subject: [PATCH 44/52] [Fix] 3d pose demo with multiple instances (#2483) --- demo/body3d_pose_lifter_demo.py | 12 +++++++++++- demo/docs/en/3d_human_pose_demo.md | 1 + mmpose/visualization/local_visualizer_3d.py | 16 +++++++++++++--- 3 files changed, 25 insertions(+), 4 deletions(-) diff --git 
a/demo/body3d_pose_lifter_demo.py b/demo/body3d_pose_lifter_demo.py index 0a29973501..840cd4edc9 100644 --- a/demo/body3d_pose_lifter_demo.py +++ b/demo/body3d_pose_lifter_demo.py @@ -70,6 +70,13 @@ def parse_args(): 'scale of the dataset, and move the bbox (along with the 2D pose) to ' 'the average bbox center of the dataset. This is useful when bbox ' 'is small, especially in multi-person scenarios.') + parser.add_argument( + '--num-instances', + type=int, + default=-1, + help='The number of 3D poses to be visualized in every frame. If ' + 'less than 0, it will be set to the number of pose results in the ' + 'first frame.') parser.add_argument( '--output-root', type=str, @@ -227,7 +234,6 @@ def get_pose_lift_results(args, visualizer, pose_lifter, pose_est_results_list, pred_instances = pose_lift_res.pred_instances keypoints = pred_instances.keypoints - # print(keypoints) keypoint_scores = pred_instances.keypoint_scores if keypoint_scores.ndim == 3: keypoint_scores = np.squeeze(keypoint_scores, axis=1) @@ -253,6 +259,9 @@ def get_pose_lift_results(args, visualizer, pose_lifter, pose_est_results_list, pred_3d_data_samples = merge_data_samples(pose_lift_results) det_data_sample = merge_data_samples(pose_est_results) + if args.num_instances < 0: + args.num_instances = len(pose_lift_results) + # Visualization if visualizer is not None: visualizer.add_datasample( @@ -264,6 +273,7 @@ def get_pose_lift_results(args, visualizer, pose_lifter, pose_est_results_list, show=args.show, draw_bbox=True, kpt_thr=args.kpt_thr, + num_instances=args.num_instances, wait_time=args.show_interval) return pred_3d_data_samples.get('pred_instances', None) diff --git a/demo/docs/en/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md index d219b8683a..96fb0137af 100644 --- a/demo/docs/en/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -20,6 +20,7 @@ ${MMPOSE_CHECKPOINT_FILE_3D} \ [--show] \ [--rebase-keypoint-height] \ [--norm-pose-2d] \ +[--num-instances] \ 
[--output-root ${OUT_VIDEO_ROOT}] \ [--save-predictions] [--save-predictions] \ diff --git a/mmpose/visualization/local_visualizer_3d.py b/mmpose/visualization/local_visualizer_3d.py index 569f69c724..7e3462ce79 100644 --- a/mmpose/visualization/local_visualizer_3d.py +++ b/mmpose/visualization/local_visualizer_3d.py @@ -126,9 +126,11 @@ def _draw_3d_data_samples( num_instances = 0 else: if len(pred_instances) > num_instances: + pred_instances_ = InstanceData() for k in pred_instances.keys(): - new_val = pred_instances.k[:num_instances] - pose_samples.pred_instances.k = new_val + new_val = pred_instances[k][:num_instances] + pred_instances_.set_field(new_val, k) + pred_instances = pred_instances_ elif num_instances < len(pred_instances): num_instances = len(pred_instances) @@ -464,6 +466,7 @@ def add_datasample(self, draw_bbox: bool = False, show_kpt_idx: bool = False, skeleton_style: str = 'mmpose', + num_instances: int = -1, show: bool = False, wait_time: float = 0, out_file: Optional[str] = None, @@ -499,6 +502,10 @@ def add_datasample(self, Defaults to ``False`` skeleton_style (str): Skeleton style selection. Defaults to ``'mmpose'`` + num_instances (int): Number of instances to be shown in 3D. If + smaller than 0, all the instances in the pose_result will be + shown. Otherwise, pad or truncate the pose_result to a length + of num_instances. Defaults to -1 show (bool): Whether to display the drawn image. Default to ``False`` wait_time (float): The interval of show (s). 
Defaults to 0 @@ -524,7 +531,10 @@ def add_datasample(self, det_img_data, det_data_sample.pred_instances) pred_img_data = self._draw_3d_data_samples( - image.copy(), data_sample, draw_gt=draw_gt) + image.copy(), + data_sample, + draw_gt=draw_gt, + num_instances=num_instances) # merge visualization results if det_img_data is not None and gt_img_data is not None: From 07104c9adf90887eee0ca326d6b8e218088e522d Mon Sep 17 00:00:00 2001 From: Tau Date: Tue, 4 Jul 2023 13:28:15 +0800 Subject: [PATCH 45/52] [Fix] Update NME default indices (#2508) --- mmpose/evaluation/metrics/keypoint_2d_metrics.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmpose/evaluation/metrics/keypoint_2d_metrics.py b/mmpose/evaluation/metrics/keypoint_2d_metrics.py index c6a63f1e51..5c8d23ac08 100644 --- a/mmpose/evaluation/metrics/keypoint_2d_metrics.py +++ b/mmpose/evaluation/metrics/keypoint_2d_metrics.py @@ -760,6 +760,8 @@ class NME(BaseMetric): 'cofw': [8, 9], # wflw: corresponding to `right-most` and `left-most` eye keypoints 'wflw': [60, 72], + # lapa: corresponding to `right-most` and `left-most` eye keypoints + 'lapa': [66, 79], } def __init__(self, From bb0e1e9017e41b394a6156a163bc96b53dd3ccdc Mon Sep 17 00:00:00 2001 From: Yifan Lareina WU Date: Tue, 4 Jul 2023 13:30:32 +0800 Subject: [PATCH 46/52] [Feature] Support Simplebaseline3D (#2500) --- configs/body_3d_keypoint/pose_lift/README.md | 51 ++++++ ...e-lift_simplebaseline3d_8xb64-200e_h36m.py | 168 ++++++++++++++++++ ...ose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py} | 0 ...3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py} | 0 ...ideopose3d-243frm-supv_8xb128-80e_h36m.py} | 0 ...27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py} | 0 ...pose3d-27frm-semi-supv_8xb64-200e_h36m.py} | 0 ...videopose3d-27frm-supv_8xb128-80e_h36m.py} | 0 ...videopose3d-81frm-supv_8xb128-80e_h36m.py} | 0 .../pose_lift/h36m/simplebaseline3d_h36m.md | 44 +++++ .../pose_lift/h36m/simplebaseline3d_h36m.yml | 21 +++ .../h36m/videopose3d_h36m.md} | 18 +- 
.../h36m/videopose3d_h36m.yml | 28 +-- .../video_pose_lift/README.md | 17 -- demo/docs/en/3d_human_pose_demo.md | 4 +- docs/en/user_guides/inference.md | 6 +- model-index.yml | 2 +- .../test_pose3d_inferencer.py | 4 +- 18 files changed, 315 insertions(+), 48 deletions(-) create mode 100644 configs/body_3d_keypoint/pose_lift/README.md create mode 100644 configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-27frm-supv_8xb128-80e_h36m.py} (100%) rename configs/body_3d_keypoint/{video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py => pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py} (100%) create mode 100644 configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.md create mode 100644 configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.yml rename 
configs/body_3d_keypoint/{video_pose_lift/h36m/videpose3d_h36m.md => pose_lift/h36m/videopose3d_h36m.md} (51%) rename configs/body_3d_keypoint/{video_pose_lift => pose_lift}/h36m/videopose3d_h36m.yml (71%) delete mode 100644 configs/body_3d_keypoint/video_pose_lift/README.md diff --git a/configs/body_3d_keypoint/pose_lift/README.md b/configs/body_3d_keypoint/pose_lift/README.md new file mode 100644 index 0000000000..7e5f9f7e2a --- /dev/null +++ b/configs/body_3d_keypoint/pose_lift/README.md @@ -0,0 +1,51 @@ +# Single-view 3D Human Body Pose Estimation + +## Video-based Single-view 3D Human Body Pose Estimation + +Video-based 3D pose estimation is the detection and analysis of X, Y, Z coordinates of human body joints from a sequence of RGB images. + +For single-person 3D pose estimation from a monocular camera, existing works can be classified into three categories: + +(1) from 2D poses to 3D poses (2D-to-3D pose lifting) + +(2) jointly learning 2D and 3D poses, and + +(3) directly regressing 3D poses from images. 
+ +### Results and Models + +#### Human3.6m Dataset + +| Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | + +| :------------------------------------------------------ | :-------------: | :---: | :-----: | :-----: | :------------------------------------------------------: | :-----------------------------------------------------: | + +| [VideoPose3D-supervised](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | + +| [VideoPose3D-supervised](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | + +| [VideoPose3D-supervised](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | + +| [VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | + +| 
[VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | + +| [VideoPose3D-semi-supervised](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | + +| [VideoPose3D-semi-supervised-CPN](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py) | 27 | 67.3 | 50.4 | 63.6 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | + +## Image-based Single-view 3D Human Body Pose Estimation + +3D pose estimation is the detection and analysis of X, Y, Z coordinates of human body joints from an RGB image. +For single-person 3D pose estimation from a monocular camera, existing works can be classified into three categories: +(1) from 2D poses to 3D poses (2D-to-3D pose lifting) +(2) jointly learning 2D and 3D poses, and +(3) directly regressing 3D poses from images. 
+ +### Results and Models + +#### Human3.6m Dataset + +| Arch | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | +| :------------------------------------------------------ | :-------------: | :---: | :-----: | :-----: | :------------------------------------------------------: | :-----------------------------------------------------: | +| [SimpleBaseline3D-tcn](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py) | 43.4 | 34.3 | /|[ckpt](https://download.openmmlab.com/mmpose/body3d/simple_baseline/simple3Dbaseline_h36m-f0ad73a4_20210419.pth) | [log](https://download.openmmlab.com/mmpose/body3d/simple_baseline/20210415_065056.log.json) | diff --git a/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py new file mode 100644 index 0000000000..b3c1c2db80 --- /dev/null +++ b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py @@ -0,0 +1,168 @@ +_base_ = ['../../../_base_/default_runtime.py'] + +vis_backends = [ + dict(type='LocalVisBackend'), +] +visualizer = dict( + type='Pose3dLocalVisualizer', vis_backends=vis_backends, name='visualizer') + +# runtime +train_cfg = dict(max_epochs=200, val_interval=10) + +# optimizer +optim_wrapper = dict(optimizer=dict(type='Adam', lr=1e-3)) + +# learning policy +param_scheduler = [ + dict(type='StepLR', step_size=100000, gamma=0.96, end=80, by_epoch=False) +] + +auto_scale_lr = dict(base_batch_size=512) + +# hooks +default_hooks = dict( + checkpoint=dict( + type='CheckpointHook', + save_best='MPJPE', + rule='less', + max_keep_ckpts=1)) + +# codec settings +# 3D keypoint normalization parameters +# From file: '{data_root}/annotation_body3d/fps50/joint3d_rel_stats.pkl' +target_mean = [[-2.55652589e-04, -7.11960570e-03, -9.81433052e-04], + [-5.65463051e-03, 3.19636009e-01, 7.19329269e-02], + [-1.01705840e-02, 6.91147892e-01, 1.55352986e-01], + 
[2.55651315e-04, 7.11954606e-03, 9.81423866e-04], + [-5.09729780e-03, 3.27040413e-01, 7.22258095e-02], + [-9.99656606e-03, 7.08277383e-01, 1.58016408e-01], + [2.90583676e-03, -2.11363307e-01, -4.74210915e-02], + [5.67537804e-03, -4.35088906e-01, -9.76974016e-02], + [5.93884964e-03, -4.91891970e-01, -1.10666618e-01], + [7.37352083e-03, -5.83948619e-01, -1.31171400e-01], + [5.41920653e-03, -3.83931702e-01, -8.68145417e-02], + [2.95964662e-03, -1.87567488e-01, -4.34536934e-02], + [1.26585822e-03, -1.20170579e-01, -2.82526049e-02], + [4.67186639e-03, -3.83644089e-01, -8.55125784e-02], + [1.67648571e-03, -1.97007177e-01, -4.31368364e-02], + [8.70569015e-04, -1.68664569e-01, -3.73902498e-02]], +target_std = [[0.11072244, 0.02238818, 0.07246294], + [0.15856311, 0.18933832, 0.20880479], + [0.19179935, 0.24320062, 0.24756193], + [0.11072181, 0.02238805, 0.07246253], + [0.15880454, 0.19977188, 0.2147063], + [0.18001944, 0.25052739, 0.24853247], + [0.05210694, 0.05211406, 0.06908241], + [0.09515367, 0.10133032, 0.12899733], + [0.11742458, 0.12648469, 0.16465091], + [0.12360297, 0.13085539, 0.16433336], + [0.14602232, 0.09707956, 0.13952731], + [0.24347532, 0.12982249, 0.20230181], + [0.2446877, 0.21501816, 0.23938235], + [0.13876084, 0.1008926, 0.1424411], + [0.23687529, 0.14491219, 0.20980829], + [0.24400695, 0.23975028, 0.25520584]] +# 2D keypoint normalization parameters +# From file: '{data_root}/annotation_body3d/fps50/joint2d_stats.pkl' +keypoints_mean = [[532.08351635, 419.74137558], [531.80953144, 418.2607141], + [530.68456967, 493.54259285], [529.36968722, 575.96448516], + [532.29767646, 421.28483336], [531.93946631, 494.72186795], + [529.71984447, 578.96110365], [532.93699382, 370.65225054], + [534.1101856, 317.90342311], [534.55416813, 304.24143901], + [534.86955004, 282.31030885], [534.11308566, 330.11296796], + [533.53637525, 376.2742511], [533.49380107, 391.72324565], + [533.52579142, 330.09494668], [532.50804964, 374.190479], + [532.72786934, 380.61615716]], 
+keypoints_std = [[107.73640054, 63.35908715], [119.00836213, 64.1215443], + [119.12412107, 50.53806215], [120.61688045, 56.38444891], + [101.95735275, 62.89636486], [106.24832897, 48.41178119], + [108.46734966, 54.58177071], [109.07369806, 68.70443672], + [111.20130351, 74.87287863], [111.63203838, 77.80542514], + [113.22330788, 79.90670556], [105.7145833, 73.27049436], + [107.05804267, 73.93175781], [107.97449418, 83.30391802], + [121.60675105, 74.25691526], [134.34378973, 77.48125087], + [131.79990652, 89.86721124]] +codec = dict( + type='ImagePoseLifting', + num_keypoints=17, + root_index=0, + remove_root=True, + target_mean=target_mean, + target_std=target_std, + keypoints_mean=keypoints_mean, + keypoints_std=keypoints_std) + +# model settings +model = dict( + type='PoseLifter', + backbone=dict( + type='TCN', + in_channels=2 * 17, + stem_channels=1024, + num_blocks=2, + kernel_sizes=(1, 1, 1), + dropout=0.5, + ), + head=dict( + type='TemporalRegressionHead', + in_channels=1024, + num_joints=16, + loss=dict(type='MSELoss'), + decoder=codec, + )) + +# base dataset settings +dataset_type = 'Human36mDataset' +data_root = 'data/h36m/' + +# pipelines +train_pipeline = [ + dict(type='GenerateTarget', encoder=codec), + dict( + type='PackPoseInputs', + meta_keys=('id', 'category_id', 'target_img_path', 'flip_indices', + 'target_root', 'target_root_index', 'target_mean', + 'target_std')) +] +val_pipeline = train_pipeline + +# data loaders +train_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + sampler=dict(type='DefaultSampler', shuffle=True), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_train.npz', + seq_len=1, + causal=True, + keypoint_2d_src='gt', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +val_dataloader = dict( + batch_size=64, + num_workers=2, + persistent_workers=True, + drop_last=False, + sampler=dict(type='DefaultSampler', shuffle=False, 
round_up=False), + dataset=dict( + type=dataset_type, + ann_file='annotation_body3d/fps50/h36m_test.npz', + seq_len=1, + causal=True, + keypoint_2d_src='gt', + data_root=data_root, + data_prefix=dict(img='images/'), + pipeline=train_pipeline, + )) +test_dataloader = val_dataloader + +# evaluators +val_evaluator = [ + dict(type='MPJPE', mode='mpjpe'), + dict(type='MPJPE', mode='p-mpjpe') +] +test_evaluator = val_evaluator diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py 
b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-supv_8xb128-80e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-supv_8xb128-80e_h36m.py diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py b/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py similarity index 100% rename from configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py rename to configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py diff --git a/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.md b/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.md new file mode 100644 index 0000000000..9bc1876315 --- /dev/null +++ b/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.md @@ -0,0 +1,44 @@ + + +
    +SimpleBaseline3D (ICCV'2017) + +```bibtex +@inproceedings{martinez_2017_3dbaseline, + title={A simple yet effective baseline for 3d human pose estimation}, + author={Martinez, Julieta and Hossain, Rayat and Romero, Javier and Little, James J.}, + booktitle={ICCV}, + year={2017} +} +``` + +
    + + + +
    +Human3.6M (TPAMI'2014) + +```bibtex +@article{h36m_pami, + author = {Ionescu, Catalin and Papava, Dragos and Olaru, Vlad and Sminchisescu, Cristian}, + title = {Human3.6M: Large Scale Datasets and Predictive Methods for 3D Human Sensing in Natural Environments}, + journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher = {IEEE Computer Society}, + volume = {36}, + number = {7}, + pages = {1325-1339}, + month = {jul}, + year = {2014} +} +``` + +
    + +Results on Human3.6M dataset with ground truth 2D detections + +| Arch | MPJPE | P-MPJPE | ckpt | log | +| :-------------------------------------------------------------- | :---: | :-----: | :-------------------------------------------------------------: | :------------------------------------------------------------: | +| [SimpleBaseline3D-tcn1](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py) | 43.4 | 34.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/simple_baseline/simple3Dbaseline_h36m-f0ad73a4_20210419.pth) | [log](https://download.openmmlab.com/mmpose/body3d/simple_baseline/20210415_065056.log.json) | + +1 Differing from the original paper, we didn't apply the `max-norm constraint` because we found this led to a better convergence and performance. diff --git a/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.yml b/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.yml new file mode 100644 index 0000000000..1a8f32f82c --- /dev/null +++ b/configs/body_3d_keypoint/pose_lift/h36m/simplebaseline3d_h36m.yml @@ -0,0 +1,21 @@ +Collections: +- Name: SimpleBaseline3D + Paper: + Title: A simple yet effective baseline for 3d human pose estimation + URL: http://openaccess.thecvf.com/content_iccv_2017/html/Martinez_A_Simple_yet_ICCV_2017_paper.html + README: https://github.com/open-mmlab/mmpose/blob/main/docs/en/papers/algorithms/simplebaseline3d.md +Models: +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_simplebaseline3d_8xb64-200e_h36m.py + In Collection: SimpleBaseline3D + Metadata: + Architecture: &id001 + - SimpleBaseline3D + Training Data: Human3.6M + Name: pose-lift_simplebaseline3d_8xb64-200e_h36m + Results: + - Dataset: Human3.6M + Metrics: + MPJPE: 43.4 + P-MPJPE: 34.3 + Task: Body 3D Keypoint + Weights: https://download.openmmlab.com/mmpose/body3d/simple_baseline/simple3Dbaseline_h36m-f0ad73a4_20210419.pth diff --git 
a/configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md b/configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.md similarity index 51% rename from configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md rename to configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.md index c36ef29df9..f1c75d786a 100644 --- a/configs/body_3d_keypoint/video_pose_lift/h36m/videpose3d_h36m.md +++ b/configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.md @@ -41,27 +41,27 @@ Testing results on Human3.6M dataset with ground truth 2D detections, supervised | Arch | Receptive Field | MPJPE | P-MPJPE | ckpt | log | | :--------------------------------------------------------- | :-------------: | :---: | :-----: | :--------------------------------------------------------: | :-------------------------------------------------------: | -| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | -| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | -| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | 
[log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | Testing results on Human3.6M dataset with CPN 2D detections1, supervised training | Arch | Receptive Field | MPJPE | P-MPJPE | ckpt | log | | :--------------------------------------------------------- | :-------------: | :---: | :-----: | :--------------------------------------------------------: | :-------------------------------------------------------: | -| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | 
[log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | -| [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | +| [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | Testing results on Human3.6M dataset with ground truth 2D detections, semi-supervised training | Training Data | Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | | :------------ | :-------------------------------------------------: | :-------------: | :---: | :-----: | :-----: | :-------------------------------------------------: | :-------------------------------------------------: | -| 10% S1 | [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | 
[ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | +| 10% S1 | [VideoPose3D](/configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | Testing results on Human3.6M dataset with CPN 2D detections1, semi-supervised training -| Training Data | Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | -| :------------ | :-------------------------------------------------: | :-------------: | :---: | :-----: | :-----: | :-------------------------------------------------: | :-------------------------------------------------: | -| 10% S1 | [VideoPose3D](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py) | 27 | 67.3 | 50.4 | 63.6 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | +| Training Data | Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | +| :------------ | :----------------------------: | :-------------: | :---: | :-----: | :-----: | :------------------------------------------------------------: | :-----------------------------------------------------------: | +| 10% S1 | [VideoPose3D](/configs/xxx.py) | 27 | 67.3 | 50.4 | 63.6 | 
[ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | 1 CPN 2D detections are provided by [official repo](https://github.com/facebookresearch/VideoPose3D/blob/master/DATASETS.md). The reformatted version used in this repository can be downloaded from [train_detection](https://download.openmmlab.com/mmpose/body3d/videopose/cpn_ft_h36m_dbb_train.npy) and [test_detection](https://download.openmmlab.com/mmpose/body3d/videopose/cpn_ft_h36m_dbb_test.npy). diff --git a/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml b/configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.yml similarity index 71% rename from configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml rename to configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.yml index 3bce33af2d..6b9d92c115 100644 --- a/configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml +++ b/configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.yml @@ -6,13 +6,13 @@ Collections: URL: http://openaccess.thecvf.com/content_CVPR_2019/html/Pavllo_3D_Human_Pose_Estimation_in_Video_With_Temporal_Convolutions_and_CVPR_2019_paper.html README: https://github.com/open-mmlab/mmpose/blob/main/docs/en/papers/algorithms/videopose3d.md Models: -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py In Collection: VideoPose3D Metadata: Architecture: &id001 - VideoPose3D Training Data: Human3.6M - Name: vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m + Name: pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m Results: - Dataset: Human3.6M Metrics: @@ -20,12 +20,12 @@ Models: P-MPJPE: 30.1 Task: Body 3D Keypoint Weights: 
https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m.py In Collection: VideoPose3D Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m + Name: pose-lift_videopose3d-81frm-supv_8xb128-80e_h36m Results: - Dataset: Human3.6M Metrics: @@ -33,12 +33,12 @@ Models: P-MPJPE: 29.2 Task: Body 3D Keypoint Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m.py In Collection: VideoPose3D Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m + Name: pose-lift_videopose3d-243frm-supv_8xb128-80e_h36m Results: - Dataset: Human3.6M Metrics: @@ -46,12 +46,12 @@ Models: P-MPJPE: 28.3 Task: Body 3D Keypoint Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py In Collection: VideoPose3D Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m + Name: pose-lift_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m Results: - Dataset: Human3.6M Metrics: @@ -59,13 +59,13 @@ Models: P-MPJPE: 41.3 Task: Body 3D Keypoint Weights: 
https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py In Collection: VideoPose3D Alias: human3d Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m + Name: pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m Results: - Dataset: Human3.6M Metrics: @@ -73,12 +73,12 @@ Models: P-MPJPE: 38.0 Task: Body 3D Keypoint Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py In Collection: VideoPose3D Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m + Name: pose-lift_videopose3d-27frm-semi-supv_8xb64-200e_h36m Results: - Dataset: Human3.6M Metrics: @@ -87,12 +87,12 @@ Models: P-MPJPE: 42.8 Task: Body 3D Keypoint Weights: https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth -- Config: configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py +- Config: configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py In Collection: VideoPose3D Metadata: Architecture: *id001 Training Data: Human3.6M - Name: vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m + Name: pose-lift_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m Results: - Dataset: Human3.6M Metrics: diff --git 
a/configs/body_3d_keypoint/video_pose_lift/README.md b/configs/body_3d_keypoint/video_pose_lift/README.md deleted file mode 100644 index c23b69ea7f..0000000000 --- a/configs/body_3d_keypoint/video_pose_lift/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# 3D human pose estimation in video with temporal convolutions and semi-supervised training - -Based on the success of 2d human pose estimation, it directly "lifts" a sequence of 2d keypoints to 3d keypoints. - -## Results and Models - -### Human3.6m Dataset - -| Arch | Receptive Field | MPJPE | P-MPJPE | N-MPJPE | ckpt | log | -| :------------------------------------------------------ | :-------------: | :---: | :-----: | :-----: | :------------------------------------------------------: | :-----------------------------------------------------: | -| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-supv_8xb128-80e_h36m.py) | 27 | 40.1 | 30.1 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised-fe8fbba9_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_supervised_20210527.log.json) | -| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-81frm-supv_8xb128-80e_h36m.py) | 81 | 39.1 | 29.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised-1f2d1104_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_81frames_fullconv_supervised_20210527.log.json) | -| [VideoPose3D-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv_8xb128-80e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised-880bea25_20210527.pth) | 
[log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_20210527.log.json) | -| [VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-1frm-supv-cpn-ft_8xb128-80e_h36m.py) | 1 | 53.0 | 41.3 | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft-5c3afaed_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_1frame_fullconv_supervised_cpn_ft_20210527.log.json) | -| [VideoPose3D-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py) | 243 | | | / | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft_20210527.log.json) | -| [VideoPose3D-semi-supervised](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv_8xb64-200e_h36m.py) | 27 | 57.2 | 42.4 | 54.2 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised-54aef83b_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_20210527.log.json) | -| [VideoPose3D-semi-supervised-CPN](/configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-27frm-semi-supv-cpn-ft_8xb64-200e_h36m.py) | 27 | 67.3 | 50.4 | 63.6 | [ckpt](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft-71be9cde_20210527.pth) | [log](https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_27frames_fullconv_semi-supervised_cpn_ft_20210527.log.json) | diff --git a/demo/docs/en/3d_human_pose_demo.md b/demo/docs/en/3d_human_pose_demo.md index 96fb0137af..367d98c403 100644 --- 
a/demo/docs/en/3d_human_pose_demo.md +++ b/demo/docs/en/3d_human_pose_demo.md @@ -54,7 +54,7 @@ demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py \ https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \ configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_8xb32-210e_coco-256x192.py \ https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_coco_256x192-b9e0b3ab_20200708.pth \ -configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ +configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ --input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ --output-root vis_results \ @@ -69,7 +69,7 @@ demo/mmdetection_cfg/faster_rcnn_r50_fpn_coco.py \ https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \ configs/body_2d_keypoint/topdown_heatmap/posetrack18/td-hm_hrnet-w48_8xb64-20e_posetrack18-384x288.py \ https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_posetrack18_384x288-5fd6d3ff_20211130.pth \ -configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ +configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py \ https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth \ --input https://user-images.githubusercontent.com/87690686/164970135-b14e424c-765a-4180-9bc8-fa8d6abc5510.mp4 \ --output-root vis_results \ diff --git a/docs/en/user_guides/inference.md b/docs/en/user_guides/inference.md index 055f283293..fa51aa20fa 100644 --- 
a/docs/en/user_guides/inference.md +++ b/docs/en/user_guides/inference.md @@ -274,9 +274,9 @@ The MMPose library has predefined aliases for several frequently used models. Th The following table lists the available 3D model aliases and their corresponding configuration names: -| Alias | Configuration Name | Task | 3D Pose Estimator | 2D Pose Estimator | Detector | -| ------- | ------------------------------------------------------ | ------------------------ | ----------------- | ----------------- | -------- | -| human3d | vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m | Human 3D pose estimation | VideoPose3D | RTMPose-m | RTMDet-m | +| Alias | Configuration Name | Task | 3D Pose Estimator | 2D Pose Estimator | Detector | +| ------- | --------------------------------------------------------- | ------------------------ | ----------------- | ----------------- | -------- | +| human3d | pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m | Human 3D pose estimation | VideoPose3D | RTMPose-m | RTMDet-m | In addition, users can utilize the CLI tool to display all available aliases with the following command: diff --git a/model-index.yml b/model-index.yml index 1965917479..498e5bc743 100644 --- a/model-index.yml +++ b/model-index.yml @@ -74,7 +74,7 @@ Import: - configs/body_2d_keypoint/topdown_regression/coco/mobilenetv2_rle_coco.yml - configs/body_2d_keypoint/topdown_regression/mpii/resnet_mpii.yml - configs/body_2d_keypoint/topdown_regression/mpii/resnet_rle_mpii.yml -- configs/body_3d_keypoint/video_pose_lift/h36m/videopose3d_h36m.yml +- configs/body_3d_keypoint/pose_lift/h36m/videopose3d_h36m.yml - configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose_coco_wholebody_face.yml - configs/face_2d_keypoint/rtmpose/wflw/rtmpose_wflw.yml - configs/face_2d_keypoint/topdown_heatmap/300w/hrnetv2_300w.yml diff --git a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py index 
09c31d3d64..4a3f5a613e 100644 --- a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py +++ b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py @@ -41,7 +41,7 @@ def test_init(self): # 1. init with config path and checkpoint inferencer = Pose3DInferencer( model= # noqa - 'configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py', # noqa + 'configs/body_3d_keypoint/pose_lift/h36m/pose-lift_videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py', # noqa weights= # noqa 'https://download.openmmlab.com/mmpose/body3d/videopose/videopose_h36m_243frames_fullconv_supervised_cpn_ft-88f5abbb_20210527.pth', # noqa pose2d_model='configs/body_2d_keypoint/simcc/coco/' @@ -57,7 +57,7 @@ def test_init(self): # 2. init with config name inferencer = Pose3DInferencer( - model='configs/body_3d_keypoint/video_pose_lift/h36m/vid-pl_' + model='configs/body_3d_keypoint/pose_lift/h36m/pose-lift_' 'videopose3d-243frm-supv-cpn-ft_8xb128-200e_h36m.py', pose2d_model='configs/body_2d_keypoint/simcc/coco/' 'simcc_res50_8xb64-210e_coco-256x192.py', From 39d58da4e2a33eaabe9917127734c0b351999374 Mon Sep 17 00:00:00 2001 From: Tau Date: Tue, 4 Jul 2023 14:28:40 +0800 Subject: [PATCH 47/52] [Docs] Update Docs (#2509) --- docs/zh_cn/user_guides/train_and_test.md | 2 ++ docs/zh_cn/user_guides/useful_tools.md | 2 ++ docs/zh_cn/user_guides/visualization.md | 2 ++ 3 files changed, 6 insertions(+) diff --git a/docs/zh_cn/user_guides/train_and_test.md b/docs/zh_cn/user_guides/train_and_test.md index 3cddc5c715..452eddc928 100644 --- a/docs/zh_cn/user_guides/train_and_test.md +++ b/docs/zh_cn/user_guides/train_and_test.md @@ -1,3 +1,5 @@ # 训练与测试 中文内容建设中,暂时请查阅[英文版文档](../../en/user_guides/train_and_test.md) + +如果您愿意参与中文文档的翻译与维护,我们团队将十分感谢您的贡献!欢迎加入我们的社区群与我们取得联系,或直接按照 [如何给 MMPose 贡献代码](../contribution_guide.md) 在 GitHub 上提交 Pull Request。 diff --git a/docs/zh_cn/user_guides/useful_tools.md b/docs/zh_cn/user_guides/useful_tools.md index 
d7e027e609..f2ceb771b7 100644 --- a/docs/zh_cn/user_guides/useful_tools.md +++ b/docs/zh_cn/user_guides/useful_tools.md @@ -1,3 +1,5 @@ # 常用工具 中文内容建设中,暂时请查阅[英文版文档](../../en/user_guides/useful_tools.md) + +如果您愿意参与中文文档的翻译与维护,我们团队将十分感谢您的贡献!欢迎加入我们的社区群与我们取得联系,或直接按照 [如何给 MMPose 贡献代码](../contribution_guide.md) 在 GitHub 上提交 Pull Request。 diff --git a/docs/zh_cn/user_guides/visualization.md b/docs/zh_cn/user_guides/visualization.md index ffd20af99a..a584eb450e 100644 --- a/docs/zh_cn/user_guides/visualization.md +++ b/docs/zh_cn/user_guides/visualization.md @@ -1,3 +1,5 @@ # 可视化 中文内容建设中,暂时请查阅[英文版文档](../../en/user_guides/visualization.md) + +如果您愿意参与中文文档的翻译与维护,我们团队将十分感谢您的贡献!欢迎加入我们的社区群与我们取得联系,或直接按照 [如何给 MMPose 贡献代码](../contribution_guide.md) 在 GitHub 上提交 Pull Request。 From c6424b848758144edde62239a650b905b81a770c Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Tue, 4 Jul 2023 14:29:17 +0800 Subject: [PATCH 48/52] [Fix] Fix numpy bug on Colab (#2505) --- setup.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/setup.py b/setup.py index c26bea7866..8b3265fb70 100644 --- a/setup.py +++ b/setup.py @@ -6,6 +6,12 @@ import warnings from setuptools import find_packages, setup +try: + import google.colab # noqa + ON_COLAB = True +except ImportError: + ON_COLAB = False + def readme(): with open('README.md', encoding='utf-8') as f: @@ -78,6 +84,16 @@ def parse_line(line): else: version = rest # NOQA info['version'] = (op, version) + + if ON_COLAB and info['package'] == 'xtcocotools': + # Due to an incompatibility between the Colab platform and the + # pre-built xtcocotools PyPI package, it is necessary to + # compile xtcocotools from source on Colab. 
+ info = dict( + line=info['line'], + package='xtcocotools@' + 'git+https://github.com/jin-s13/xtcocoapi') + yield info def parse_require_file(fpath): From 52e4925724443f9792ceecd09276d727dcbad645 Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Tue, 4 Jul 2023 16:28:34 +0800 Subject: [PATCH 49/52] [Fix] fix ckpt urls & test settings (#2510) --- .../ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py | 2 +- configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.md | 2 +- configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml | 4 ++-- configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md | 2 +- .../body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.yml | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py b/configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py index fa3139a71a..844d17df4e 100644 --- a/configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py +++ b/configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py @@ -73,7 +73,7 @@ loss=dict(type='KeypointMSELoss', use_target_weight=True), decoder=codec), test_cfg=dict( - flip_test=False, + flip_test=True, flip_mode='heatmap', shift_heatmap=False, )) diff --git a/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.md b/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.md index ea58d95b7f..0bbedbe696 100644 --- a/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.md +++ b/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.md @@ -52,5 +52,5 @@ Results on CrowdPose test without multi-scale test | Arch | Input Size | AP | AP50 | AP75 | AP (E) | AP (M) | AP (H) | ckpt | log | | :--------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :----: | :----: | :----: | :--------------------------------------------: | 
:-------------------------------------------: | -| [HRNet-w32](/configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512.py) | 512x512 | 0.663 | 0.857 | 0.714 | 0.740 | 0.671 | 0.576 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-140e_crowdpose-512x512_147bae97-20221228.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-140e_crowdpose-512x512_20221228.json) | +| [HRNet-w32](/configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512.py) | 512x512 | 0.663 | 0.857 | 0.714 | 0.740 | 0.671 | 0.576 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512_147bae97-20221228.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512_20221228.json) | | [HRNet-w48](/configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640.py) | 640x640 | 0.679 | 0.869 | 0.731 | 0.753 | 0.688 | 0.593 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640_4ea6031e-20230128.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640_20230128.json) | diff --git a/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml b/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml index 02312e8cba..5bbb7f4b25 100644 --- a/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml +++ b/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml @@ -17,7 +17,7 @@ Models: AP (M): 0.671 AP (L): 0.576 Task: Body 2D Keypoint - Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-140e_crowdpose-512x512_147bae97-20221228.pth + Weights: 
https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512_147bae97-20221228.pth - Config: configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640.py In Collection: DEKR Metadata: @@ -34,4 +34,4 @@ Models: AP (M): 0.688 AP (L): 0.593 Task: Body 2D Keypoint - Weights: https://download.openmmlab.com/mmpose/bottom_up/dekr/hrnet_w48_crowdpose_640x640-ef6b6040_20220930.pth + Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640_4ea6031e-20230128.pth diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md index 07563f89be..68baf35aec 100644 --- a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.md @@ -57,5 +57,5 @@ Results on COCO val2017 with detector having human AP of 56.4 on COCO val2017 da | :-------------------------------------------- | :--------: | :---: | :-------------: | :-------------: | :---: | :-------------: | :-------------------------------------------: | :-------------------------------------------: | | [ViTPose-S](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py) | 256x192 | 0.736 | 0.900 | 0.811 | 0.790 | 0.940 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192-4c101a76_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192-4c101a76_20230314.json) | | [ViTPose-B](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py) | 256x192 | 0.756 | 0.906 | 0.826 | 0.809 | 0.946 | 
[ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192-0b8234ea_20230407.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192-0b8234ea_20230407.json) | -| [ViTPose-L](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py) | 256x192 | 0.781 | 0.914 | 0.853 | 0.833 | 0.952 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192-3a7ee9e1_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192-3a7ee9e1_20230314.json) | +| [ViTPose-L](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py) | 256x192 | 0.780 | 0.914 | 0.851 | 0.833 | 0.952 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192-3a7ee9e1_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192-3a7ee9e1_20230314.json) | | [ViTPose-H](/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py) | 256x192 | 0.789 | 0.916 | 0.856 | 0.839 | 0.953 | [ckpt](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192-ffd48c05_20230314.pth) | [log](https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192-ffd48c05_20230314.json) | diff --git a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.yml b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.yml index 6d1cc7db15..10cc7bf972 100644 --- 
a/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.yml +++ b/configs/body_2d_keypoint/topdown_heatmap/coco/vitpose_coco.yml @@ -128,9 +128,9 @@ Models: Results: - Dataset: COCO Metrics: - AP: 0.781 + AP: 0.780 AP@0.5: 0.914 - AP@0.75: 0.853 + AP@0.75: 0.851 AR: 0.833 AR@0.5: 0.952 Task: Body 2D Keypoint From 1681afb0407898c0ec140b19dfba37fce36eb40d Mon Sep 17 00:00:00 2001 From: Tau Date: Tue, 4 Jul 2023 16:59:01 +0800 Subject: [PATCH 50/52] [Fix] Fix new config with read_base (#2512) --- .../rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py | 4 +++- .../rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py | 4 +++- .../coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py | 4 +++- .../yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py | 4 +++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py index 759750110c..af102ec20e 100644 --- a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py +++ b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_m_8xb256-420e_coco-256x192.py @@ -1,5 +1,7 @@ # Copyright (c) OpenMMLab. All rights reserved. -if '_base_': +from mmengine.config import read_base + +with read_base(): from mmpose.configs._base_.default_runtime import * from albumentations.augmentations import Blur, CoarseDropout, MedianBlur diff --git a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py index f49c7a2e23..6fc5ec0abe 100644 --- a/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py +++ b/mmpose/configs/body_2d_keypoint/rtmpose/coco/rtmpose_s_8xb256_420e_aic_coco_256x192.py @@ -1,5 +1,7 @@ # Copyright (c) OpenMMLab. All rights reserved. 
-if '_base_': +from mmengine.config import read_base + +with read_base(): from mmpose.configs._base_.default_runtime import * from albumentations.augmentations import Blur, CoarseDropout, MedianBlur diff --git a/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py b/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py index 9c73e1551a..1ecf3a704e 100644 --- a/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py +++ b/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py @@ -1,5 +1,7 @@ # Copyright (c) OpenMMLab. All rights reserved. -if '_base_': +from mmengine.config import read_base + +with read_base(): from mmpose.configs._base_.default_runtime import * from mmengine.dataset import DefaultSampler diff --git a/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py b/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py index 2ff6f31463..9a75e35e8d 100644 --- a/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py +++ b/projects/yolox_pose/configs/py_yolox_pose_s_8xb32_300e_coco.py @@ -1,4 +1,6 @@ -if '_base_': +from mmengine.config import read_base + +with read_base(): from ._base_.py_default_runtime import * from datasets import (CocoDataset, FilterDetPoseAnnotations, PackDetPoseInputs, From 0855fca78a69087de17f41f4c59d537eefc96597 Mon Sep 17 00:00:00 2001 From: Tau Date: Tue, 4 Jul 2023 20:19:07 +0800 Subject: [PATCH 51/52] bump v1.1.0 (#2513) --- README.md | 21 +++-- README_CN.md | 19 +++-- docs/en/dataset_zoo/2d_animal_keypoint.md | 3 +- docs/en/faq.md | 1 + docs/en/installation.md | 14 ++-- docs/src/papers/datasets/animalkingdom.md | 3 +- docs/zh_cn/dataset_zoo/2d_animal_keypoint.md | 3 +- docs/zh_cn/faq.md | 1 + docs/zh_cn/installation.md | 14 ++-- docs/zh_cn/notes/changelog.md | 87 ++++++++++++++++++++ mmpose/version.py | 2 +- 11 files changed, 
132 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index af54288892..b250d570b3 100644 --- a/README.md +++ b/README.md @@ -97,9 +97,12 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- ## What's New -- We are excited to release **YOLOX-Pose**, a One-Stage multi-person pose estimation model based on YOLOX. Checkout our [project page](/projects/yolox_pose/) for more details. +- We are glad to support 3 new datasets: + - (CVPR 2023) [Human-Art](https://github.com/IDEA-Research/HumanArt) + - (CVPR 2022) [Animal Kingdom](https://github.com/sutdcv/Animal-Kingdom) + - (AAAI 2020) [LaPa](https://github.com/JDAI-CV/lapa-dataset/) -![yolox-pose_intro](https://user-images.githubusercontent.com/26127467/226655503-3cee746e-6e42-40be-82ae-6e7cae2a4c7e.jpg) +![image](https://github.com/open-mmlab/mmpose/assets/13503330/c9171dbb-7e7a-4c39-98e3-c92932182efb) - Welcome to [*projects of MMPose*](/projects/README.md), where you can access to the latest features of MMPose, and share your ideas and codes with the community at once. Contribution to MMPose will be simple and smooth: @@ -110,18 +113,20 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - [RTMPose](/projects/rtmpose/) - [YOLOX-Pose](/projects/yolox_pose/) - [MMPose4AIGC](/projects/mmpose4aigc/) + - [Simple Keypoints](/projects/skps/) - Become a contributors and make MMPose greater. Start your journey from the [example project](/projects/example_project/)
    -- 2022-04-06: MMPose [v1.0.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) is officially released, with the main updates including: +- 2023-07-04: MMPose [v1.1.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.1.0) is officially released, with the main updates including: - - Release of [YOLOX-Pose](/projects/yolox_pose/), a One-Stage multi-person pose estimation model based on YOLOX - - Development of [MMPose for AIGC](/projects/mmpose4aigc/) based on RTMPose, generating high-quality skeleton images for Pose-guided AIGC projects - - Support for OpenPose-style skeleton visualization - - More complete and user-friendly [documentation and tutorials](https://mmpose.readthedocs.io/en/latest/overview.html) + - Support new datasets: Human-Art, Animal Kingdom and LaPa. + - Support new config type that is more user-friendly and flexible. + - Improve RTMPose with better performance. + - Migrate 3D pose estimation models on h36m. + - Inference speedup and webcam inference with all demo scripts. - Please refer to the [release notes](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) for more updates brought by MMPose v1.0.0! + Please refer to the [release notes](https://github.com/open-mmlab/mmpose/releases/tag/v1.1.0) for more updates brought by MMPose v1.1.0! 
## 0.x / 1.x Migration diff --git a/README_CN.md b/README_CN.md index 80c4b39aad..48672c2a88 100644 --- a/README_CN.md +++ b/README_CN.md @@ -95,7 +95,10 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- ## 最新进展 -- 我们发布了 **YOLOX-Pose**,一个基于 YOLOX 的 One-Stage 多人姿态估计模型。更多信息敬请参阅 YOLOX-Pose [项目主页](/projects/yolox_pose/) +- 我们支持了三个新的数据集: + - (CVPR 2023) [Human-Art](https://github.com/IDEA-Research/HumanArt) + - (CVPR 2022) [Animal Kingdom](https://github.com/sutdcv/Animal-Kingdom) + - (AAAI 2020) [LaPa](https://github.com/JDAI-CV/lapa-dataset/) ![yolox-pose_intro](https://user-images.githubusercontent.com/26127467/226655503-3cee746e-6e42-40be-82ae-6e7cae2a4c7e.jpg) @@ -108,18 +111,20 @@ https://user-images.githubusercontent.com/15977946/124654387-0fd3c500-ded1-11eb- - [RTMPose](/projects/rtmpose/) - [YOLOX-Pose](/projects/yolox_pose/) - [MMPose4AIGC](/projects/mmpose4aigc/) + - [Simple Keypoints](/projects/skps/) - 从简单的 [示例项目](/projects/example_project/) 开启您的 MMPose 代码贡献者之旅吧,让我们共同打造更好用的 MMPose!
    -- 2022-04-06:MMPose [v1.0.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) 正式发布了,主要更新包括: +- 2023-07-04:MMPose [v1.1.0](https://github.com/open-mmlab/mmpose/releases/tag/v1.1.0) 正式发布了,主要更新包括: - - 发布了 [YOLOX-Pose](/projects/yolox_pose/),一个基于 YOLOX 的 One-Stage 多人姿态估计模型 - - 基于 RTMPose 开发的 [MMPose for AIGC](/projects/mmpose4aigc/),生成高质量骨架图片用于 Pose-guided AIGC 项目 - - 支持 OpenPose 风格的骨架可视化 - - 更加完善、友好的 [文档和教程](https://mmpose.readthedocs.io/zh_CN/latest/overview.html) + - 支持新数据集:Human-Art、Animal Kingdom、LaPa。 + - 支持新的配置文件风格,支持 IDE 跳转和搜索。 + - 提供更强性能的 RTMPose 模型。 + - 迁移 3D 姿态估计算法。 + - 加速推理脚本,全部 demo 脚本支持摄像头推理。 - 请查看完整的 [版本说明](https://github.com/open-mmlab/mmpose/releases/tag/v1.0.0) 以了解更多 MMPose v1.0.0 带来的更新! + 请查看完整的 [版本说明](https://github.com/open-mmlab/mmpose/releases/tag/v1.1.0) 以了解更多 MMPose v1.1.0 带来的更新! ## 0.x / 1.x 迁移 diff --git a/docs/en/dataset_zoo/2d_animal_keypoint.md b/docs/en/dataset_zoo/2d_animal_keypoint.md index 11087c2766..9ef6022ecc 100644 --- a/docs/en/dataset_zoo/2d_animal_keypoint.md +++ b/docs/en/dataset_zoo/2d_animal_keypoint.md @@ -480,8 +480,7 @@ mmpose ```bibtex -@InProceedings{ - Ng_2022_CVPR, +@inproceedings{Ng_2022_CVPR, author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, diff --git a/docs/en/faq.md b/docs/en/faq.md index 99da761047..3e81a312ca 100644 --- a/docs/en/faq.md +++ b/docs/en/faq.md @@ -19,6 +19,7 @@ Detailed compatible MMPose and MMCV versions are shown as below. 
Please choose t | MMPose version | MMCV/MMEngine version | | :------------: | :-----------------------------: | +| 1.1.0 | mmcv>=2.0.1, mmengine>=0.8.0 | | 1.0.0 | mmcv>=2.0.0, mmengine>=0.7.0 | | 1.0.0rc1 | mmcv>=2.0.0rc4, mmengine>=0.6.0 | | 1.0.0rc0 | mmcv>=2.0.0rc0, mmengine>=0.0.1 | diff --git a/docs/en/installation.md b/docs/en/installation.md index 7285982059..47db25bb5f 100644 --- a/docs/en/installation.md +++ b/docs/en/installation.md @@ -23,7 +23,7 @@ We recommend that users follow our best practices to install MMPose. However, th In this section we demonstrate how to prepare an environment with PyTorch. -MMPose works on Linux, Windows and macOS. It requires Python 3.7+, CUDA 9.2+ and PyTorch 1.6+. +MMPose works on Linux, Windows and macOS. It requires Python 3.7+, CUDA 9.2+ and PyTorch 1.8+. If you are experienced with PyTorch and have already installed it, you can skip this part and jump to the [MMPose Installation](#install-mmpose). Otherwise, you can follow these steps for the preparation. @@ -59,13 +59,13 @@ conda install pytorch torchvision cpuonly -c pytorch ```shell pip install -U openmim mim install mmengine -mim install "mmcv>=2.0.0" +mim install "mmcv>=2.0.1" ``` Note that some of the demo scripts in MMPose require [MMDetection](https://github.com/open-mmlab/mmdetection) (mmdet) for human detection. If you want to run these demo scripts with mmdet, you can easily install mmdet as a dependency by running: ```shell -mim install "mmdet>=3.0.0" +mim install "mmdet>=3.1.0" ``` ## Best Practices @@ -89,7 +89,7 @@ pip install -v -e . To use mmpose as a dependency or third-party package, install it with pip: ```shell -mim install "mmpose>=1.0.0" +mim install "mmpose>=1.1.0" ``` ## Verify the installation @@ -173,7 +173,7 @@ To install MMCV with pip instead of MIM, please follow [MMCV installation guides For example, the following command install mmcv built for PyTorch 1.10.x and CUDA 11.3. 
```shell -pip install 'mmcv>=2.0.0' -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.10/index.html +pip install 'mmcv>=2.0.1' -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.10/index.html ``` ### Install on CPU-only platforms @@ -192,7 +192,7 @@ thus we only need to install MMEngine, MMCV and MMPose with the following comman ```shell !pip3 install openmim !mim install mmengine -!mim install "mmcv>=2.0.0" +!mim install "mmcv>=2.0.1" ``` **Step 2.** Install MMPose from the source. @@ -208,7 +208,7 @@ thus we only need to install MMEngine, MMCV and MMPose with the following comman ```python import mmpose print(mmpose.__version__) -# Example output: 1.0.0 +# Example output: 1.1.0 ``` ```{note} diff --git a/docs/src/papers/datasets/animalkingdom.md b/docs/src/papers/datasets/animalkingdom.md index 3aa8592331..64b5fe375a 100644 --- a/docs/src/papers/datasets/animalkingdom.md +++ b/docs/src/papers/datasets/animalkingdom.md @@ -6,8 +6,7 @@ Animal Kingdom (CVPR'2022) ```bibtex -@InProceedings{ - Ng_2022_CVPR, +@InProceedings{Ng_2022_CVPR, author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, diff --git a/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md b/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md index 21106100db..28b0b726b4 100644 --- a/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md +++ b/docs/zh_cn/dataset_zoo/2d_animal_keypoint.md @@ -490,8 +490,7 @@ mmpose ```bibtex -@InProceedings{ - Ng_2022_CVPR, +@inproceedings{Ng_2022_CVPR, author = {Ng, Xun Long and Ong, Kian Eng and Zheng, Qichen and Ni, Yun and Yeo, Si Yong and Liu, Jun}, title = {Animal Kingdom: A Large and Diverse Dataset for Animal Behavior Understanding}, booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, 
diff --git a/docs/zh_cn/faq.md b/docs/zh_cn/faq.md index 509fb2d4b4..b1e6998396 100644 --- a/docs/zh_cn/faq.md +++ b/docs/zh_cn/faq.md @@ -19,6 +19,7 @@ Detailed compatible MMPose and MMCV versions are shown as below. Please choose t | MMPose version | MMCV/MMEngine version | | :------------: | :-----------------------------: | +| 1.1.0 | mmcv>=2.0.1, mmengine>=0.8.0 | | 1.0.0 | mmcv>=2.0.0, mmengine>=0.7.0 | | 1.0.0rc1 | mmcv>=2.0.0rc4, mmengine>=0.6.0 | | 1.0.0rc0 | mmcv>=2.0.0rc0, mmengine>=0.0.1 | diff --git a/docs/zh_cn/installation.md b/docs/zh_cn/installation.md index 3e9a709e9e..ef515c8030 100644 --- a/docs/zh_cn/installation.md +++ b/docs/zh_cn/installation.md @@ -21,7 +21,7 @@ 在本节中,我们将演示如何准备 PyTorch 相关的依赖环境。 -MMPose 适用于 Linux、Windows 和 macOS。它需要 Python 3.7+、CUDA 9.2+ 和 PyTorch 1.6+。 +MMPose 适用于 Linux、Windows 和 macOS。它需要 Python 3.7+、CUDA 9.2+ 和 PyTorch 1.8+。 如果您对配置 PyTorch 环境已经很熟悉,并且已经完成了配置,可以直接进入下一节:[安装](#安装-mmpose)。否则,请依照以下步骤完成配置。 @@ -57,13 +57,13 @@ conda install pytorch torchvision cpuonly -c pytorch ```shell pip install -U openmim mim install mmengine -mim install "mmcv>=2.0.0" +mim install "mmcv>=2.0.1" ``` 请注意,MMPose 中的一些推理示例脚本需要使用 [MMDetection](https://github.com/open-mmlab/mmdetection) (mmdet) 检测人体。如果您想运行这些示例脚本,可以通过运行以下命令安装 mmdet: ```shell -mim install "mmdet>=3.0.0" +mim install "mmdet>=3.1.0" ``` ## 最佳实践 @@ -88,7 +88,7 @@ pip install -v -e . 
如果只是希望调用 MMPose 的接口,或者在自己的项目中导入 MMPose 中的模块。直接使用 mim 安装即可。 ```shell -mim install "mmpose>=1.0.0" +mim install "mmpose>=1.1.0" ``` ## 验证安装 @@ -180,7 +180,7 @@ MMCV 包含 C++ 和 CUDA 扩展,因此其对 PyTorch 的依赖比较复杂。M 举个例子,如下命令将会安装基于 PyTorch 1.10.x 和 CUDA 11.3 编译的 mmcv。 ```shell -pip install 'mmcv>=2.0.0' -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.10/index.html +pip install 'mmcv>=2.0.1' -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.10/index.html ``` ### 在 CPU 环境中安装 @@ -198,7 +198,7 @@ MMPose 可以仅在 CPU 环境中安装,在 CPU 模式下,您可以完成训 ```shell !pip3 install openmim !mim install mmengine -!mim install "mmcv>=2.0.0" +!mim install "mmcv>=2.0.1" ``` **第 2 步** 从源码安装 mmpose @@ -214,7 +214,7 @@ MMPose 可以仅在 CPU 环境中安装,在 CPU 模式下,您可以完成训 ```python import mmpose print(mmpose.__version__) -# 预期输出: 1.0.0 +# 预期输出: 1.1.0 ``` ```{note} diff --git a/docs/zh_cn/notes/changelog.md b/docs/zh_cn/notes/changelog.md index 942d3d515b..68beeeb069 100644 --- a/docs/zh_cn/notes/changelog.md +++ b/docs/zh_cn/notes/changelog.md @@ -1,5 +1,92 @@ # Changelog +## **v1.0.0rc1 (14/10/2022)** + +**Highlights** + +- Release RTMPose, a high-performance real-time pose estimation algorithm with cross-platform deployment and inference support. 
See details at the [project page](/projects/rtmpose/) +- Support several new algorithms: ViTPose (arXiv'2022), CID (CVPR'2022), DEKR (CVPR'2021) +- Add Inferencer, a convenient inference interface that perform pose estimation and visualization on images, videos and webcam streams with only one line of code +- Introduce *Project*, a new form for rapid and easy implementation of new algorithms and features in MMPose, which is more handy for community contributors + +**New Features** + +- Support RTMPose ([#1971](https://github.com/open-mmlab/mmpose/pull/1971), [#2024](https://github.com/open-mmlab/mmpose/pull/2024), [#2028](https://github.com/open-mmlab/mmpose/pull/2028), [#2030](https://github.com/open-mmlab/mmpose/pull/2030), [#2040](https://github.com/open-mmlab/mmpose/pull/2040), [#2057](https://github.com/open-mmlab/mmpose/pull/2057)) +- Support Inferencer ([#1969](https://github.com/open-mmlab/mmpose/pull/1969)) +- Support ViTPose ([#1876](https://github.com/open-mmlab/mmpose/pull/1876), [#2056](https://github.com/open-mmlab/mmpose/pull/2056), [#2058](https://github.com/open-mmlab/mmpose/pull/2058), [#2065](https://github.com/open-mmlab/mmpose/pull/2065)) +- Support CID ([#1907](https://github.com/open-mmlab/mmpose/pull/1907)) +- Support DEKR ([#1834](https://github.com/open-mmlab/mmpose/pull/1834), [#1901](https://github.com/open-mmlab/mmpose/pull/1901)) +- Support training with multiple datasets ([#1767](https://github.com/open-mmlab/mmpose/pull/1767), [#1930](https://github.com/open-mmlab/mmpose/pull/1930), [#1938](https://github.com/open-mmlab/mmpose/pull/1938), [#2025](https://github.com/open-mmlab/mmpose/pull/2025)) +- Add *project* to allow rapid and easy implementation of new models and features ([#1914](https://github.com/open-mmlab/mmpose/pull/1914)) + +**Improvements** + +- Improve documentation quality ([#1846](https://github.com/open-mmlab/mmpose/pull/1846), [#1858](https://github.com/open-mmlab/mmpose/pull/1858), 
[#1872](https://github.com/open-mmlab/mmpose/pull/1872), [#1899](https://github.com/open-mmlab/mmpose/pull/1899), [#1925](https://github.com/open-mmlab/mmpose/pull/1925), [#1945](https://github.com/open-mmlab/mmpose/pull/1945), [#1952](https://github.com/open-mmlab/mmpose/pull/1952), [#1990](https://github.com/open-mmlab/mmpose/pull/1990), [#2023](https://github.com/open-mmlab/mmpose/pull/2023), [#2042](https://github.com/open-mmlab/mmpose/pull/2042)) +- Support visualizing keypoint indices ([#2051](https://github.com/open-mmlab/mmpose/pull/2051)) +- Support OpenPose style visualization ([#2055](https://github.com/open-mmlab/mmpose/pull/2055)) +- Accelerate image transpose in data pipelines with tensor operation ([#1976](https://github.com/open-mmlab/mmpose/pull/1976)) +- Support auto-import modules from registry ([#1961](https://github.com/open-mmlab/mmpose/pull/1961)) +- Support keypoint partition metric ([#1944](https://github.com/open-mmlab/mmpose/pull/1944)) +- Support SimCC 1D-heatmap visualization ([#1912](https://github.com/open-mmlab/mmpose/pull/1912)) +- Support saving predictions and data metainfo in demos ([#1814](https://github.com/open-mmlab/mmpose/pull/1814), [#1879](https://github.com/open-mmlab/mmpose/pull/1879)) +- Support SimCC with DARK ([#1870](https://github.com/open-mmlab/mmpose/pull/1870)) +- Remove Gaussian blur for offset maps in UDP-regress ([#1815](https://github.com/open-mmlab/mmpose/pull/1815)) +- Refactor encoding interface of Codec for better extendibility and easier configuration ([#1781](https://github.com/open-mmlab/mmpose/pull/1781)) +- Support evaluating CocoMetric without annotation file ([#1722](https://github.com/open-mmlab/mmpose/pull/1722)) +- Improve unit tests ([#1765](https://github.com/open-mmlab/mmpose/pull/1765)) + +**Bug Fixes** + +- Fix repeated warnings from different ranks ([#2053](https://github.com/open-mmlab/mmpose/pull/2053)) +- Avoid frequent scope switching when using mmdet inference api 
([#2039](https://github.com/open-mmlab/mmpose/pull/2039)) +- Remove EMA parameters and message hub data when publishing model checkpoints ([#2036](https://github.com/open-mmlab/mmpose/pull/2036)) +- Fix metainfo copying in dataset class ([#2017](https://github.com/open-mmlab/mmpose/pull/2017)) +- Fix top-down demo bug when there is no object detected ([#2007](https://github.com/open-mmlab/mmpose/pull/2007)) +- Fix config errors ([#1882](https://github.com/open-mmlab/mmpose/pull/1882), [#1906](https://github.com/open-mmlab/mmpose/pull/1906), [#1995](https://github.com/open-mmlab/mmpose/pull/1995)) +- Fix image demo failure when GUI is unavailable ([#1968](https://github.com/open-mmlab/mmpose/pull/1968)) +- Fix bug in AdaptiveWingLoss ([#1953](https://github.com/open-mmlab/mmpose/pull/1953)) +- Fix incorrect importing of RepeatDataset which is deprecated ([#1943](https://github.com/open-mmlab/mmpose/pull/1943)) +- Fix bug in bottom-up datasets that ignores images without instances ([#1752](https://github.com/open-mmlab/mmpose/pull/1752), [#1936](https://github.com/open-mmlab/mmpose/pull/1936)) +- Fix upstream dependency issues ([#1867](https://github.com/open-mmlab/mmpose/pull/1867), [#1921](https://github.com/open-mmlab/mmpose/pull/1921)) +- Fix evaluation issues and update results ([#1763](https://github.com/open-mmlab/mmpose/pull/1763), [#1773](https://github.com/open-mmlab/mmpose/pull/1773), [#1780](https://github.com/open-mmlab/mmpose/pull/1780), [#1850](https://github.com/open-mmlab/mmpose/pull/1850), [#1868](https://github.com/open-mmlab/mmpose/pull/1868)) +- Fix local registry missing warnings ([#1849](https://github.com/open-mmlab/mmpose/pull/1849)) +- Remove deprecated scripts for model deployment ([#1845](https://github.com/open-mmlab/mmpose/pull/1845)) +- Fix a bug in input transformation in BaseHead ([#1843](https://github.com/open-mmlab/mmpose/pull/1843)) +- Fix an interface mismatch with MMDetection in webcam demo 
([#1813](https://github.com/open-mmlab/mmpose/pull/1813)) +- Fix a bug in heatmap visualization that causes incorrect scale ([#1800](https://github.com/open-mmlab/mmpose/pull/1800)) +- Add model metafiles ([#1768](https://github.com/open-mmlab/mmpose/pull/1768)) + +## **v1.0.0rc0 (14/10/2022)** + +**New Features** + +- Support 4 light-weight pose estimation algorithms: [SimCC](https://doi.org/10.48550/arxiv.2107.03332) (ECCV'2022), [Debias-IPR](https://openaccess.thecvf.com/content/ICCV2021/papers/Gu_Removing_the_Bias_of_Integral_Pose_Regression_ICCV_2021_paper.pdf) (ICCV'2021), [IPR](https://arxiv.org/abs/1711.08229) (ECCV'2018), and [DSNT](https://arxiv.org/abs/1801.07372v2) (ArXiv'2018) ([#1628](https://github.com/open-mmlab/mmpose/pull/1628)) + +**Migrations** + +- Add Webcam API in MMPose 1.0 ([#1638](https://github.com/open-mmlab/mmpose/pull/1638), [#1662](https://github.com/open-mmlab/mmpose/pull/1662)) @Ben-Louis +- Add codec for Associative Embedding (beta) ([#1603](https://github.com/open-mmlab/mmpose/pull/1603)) @ly015 + +**Improvements** + +- Add a colab tutorial for MMPose 1.0 ([#1660](https://github.com/open-mmlab/mmpose/pull/1660)) @Tau-J +- Add model index in config folder ([#1710](https://github.com/open-mmlab/mmpose/pull/1710), [#1709](https://github.com/open-mmlab/mmpose/pull/1709), [#1627](https://github.com/open-mmlab/mmpose/pull/1627)) @ly015, @Tau-J, @Ben-Louis +- Update and improve documentation ([#1692](https://github.com/open-mmlab/mmpose/pull/1692), [#1656](https://github.com/open-mmlab/mmpose/pull/1656), [#1681](https://github.com/open-mmlab/mmpose/pull/1681), [#1677](https://github.com/open-mmlab/mmpose/pull/1677), [#1664](https://github.com/open-mmlab/mmpose/pull/1664), [#1659](https://github.com/open-mmlab/mmpose/pull/1659)) @Tau-J, @Ben-Louis, @liqikai9 +- Improve config structures and formats ([#1651](https://github.com/open-mmlab/mmpose/pull/1651)) @liqikai9 + +**Bug Fixes** + +- Update mmengine version requirements 
([#1715](https://github.com/open-mmlab/mmpose/pull/1715)) @Ben-Louis +- Update dependencies of pre-commit hooks ([#1705](https://github.com/open-mmlab/mmpose/pull/1705)) @Ben-Louis +- Fix mmcv version in DockerFile ([#1704](https://github.com/open-mmlab/mmpose/pull/1704)) +- Fix a bug in setting dataset metainfo in configs ([#1684](https://github.com/open-mmlab/mmpose/pull/1684)) @ly015 +- Fix a bug in UDP training ([#1682](https://github.com/open-mmlab/mmpose/pull/1682)) @liqikai9 +- Fix a bug in Dark decoding ([#1676](https://github.com/open-mmlab/mmpose/pull/1676)) @liqikai9 +- Fix bugs in visualization ([#1671](https://github.com/open-mmlab/mmpose/pull/1671), [#1668](https://github.com/open-mmlab/mmpose/pull/1668), [#1657](https://github.com/open-mmlab/mmpose/pull/1657)) @liqikai9, @Ben-Louis +- Fix incorrect flops calculation ([#1669](https://github.com/open-mmlab/mmpose/pull/1669)) @liqikai9 +- Fix `tensor.tile` compatibility issue for pytorch 1.6 ([#1658](https://github.com/open-mmlab/mmpose/pull/1658)) @ly015 +- Fix compatibility with `MultilevelPixelData` ([#1647](https://github.com/open-mmlab/mmpose/pull/1647)) @liqikai9 + ## **v1.0.0beta (1/09/2022)** We are excited to announce the release of MMPose 1.0.0beta. diff --git a/mmpose/version.py b/mmpose/version.py index 73312cc28d..bf58664b39 100644 --- a/mmpose/version.py +++ b/mmpose/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. 
-__version__ = '1.0.0' +__version__ = '1.1.0' short_version = __version__ From 2ab56bc0e7a20b6771681e9e423bc60982233b7a Mon Sep 17 00:00:00 2001 From: Peng Lu Date: Tue, 4 Jul 2023 20:48:40 +0800 Subject: [PATCH 52/52] [Refactor] Remove webcam API (#2511) --- demo/docs/en/webcam_api_demo.md | 110 +---- demo/docs/zh_cn/webcam_api_demo.md | 117 +---- demo/webcam_api_demo.py | 76 ---- demo/webcam_cfg/human_animal_pose.py | 137 ------ demo/webcam_cfg/human_pose.py | 102 ----- demo/webcam_cfg/test_camera.py | 22 - docs/en/api.rst | 2 - docs/en/webcam_api.rst | 112 ----- docs/zh_cn/api.rst | 2 - docs/zh_cn/webcam_api.rst | 112 ----- mmpose/apis/webcam/__init__.py | 4 - mmpose/apis/webcam/nodes/__init__.py | 15 - .../apis/webcam/nodes/base_visualizer_node.py | 65 --- .../webcam/nodes/helper_nodes/__init__.py | 6 - .../webcam/nodes/helper_nodes/monitor_node.py | 167 ------- .../helper_nodes/object_assigner_node.py | 139 ------ .../nodes/helper_nodes/recorder_node.py | 126 ------ .../apis/webcam/nodes/model_nodes/__init__.py | 5 - .../webcam/nodes/model_nodes/detector_node.py | 143 ------ .../nodes/model_nodes/pose_estimator_node.py | 135 ------ mmpose/apis/webcam/nodes/node.py | 407 ------------------ mmpose/apis/webcam/nodes/registry.py | 4 - .../webcam/nodes/visualizer_nodes/__init__.py | 10 - .../visualizer_nodes/bigeye_effect_node.py | 127 ------ .../visualizer_nodes/notice_board_node.py | 128 ------ .../object_visualizer_node.py | 341 --------------- .../sunglasses_effect_node.py | 143 ------ mmpose/apis/webcam/utils/__init__.py | 20 - mmpose/apis/webcam/utils/buffer.py | 203 --------- mmpose/apis/webcam/utils/event.py | 137 ------ mmpose/apis/webcam/utils/image_capture.py | 40 -- mmpose/apis/webcam/utils/message.py | 186 -------- mmpose/apis/webcam/utils/misc.py | 367 ---------------- mmpose/apis/webcam/utils/pose.py | 181 -------- mmpose/apis/webcam/webcam_executor.py | 329 -------------- requirements/mminstall.txt | 2 +- .../test_mmpose_inferencer.py | 5 + 
.../test_pose2d_inferencer.py | 5 + .../test_pose3d_inferencer.py | 5 + .../test_nodes/test_big_eye_effect_node.py | 62 --- .../test_nodes/test_detector_node.py | 85 ---- .../test_nodes/test_monitor_node.py | 67 --- .../test_nodes/test_notice_board_node.py | 61 --- .../test_nodes/test_object_assigner_node.py | 86 ---- .../test_nodes/test_object_visualizer_node.py | 80 ---- .../test_nodes/test_pose_estimator_node.py | 96 ----- .../test_nodes/test_recorder_node.py | 69 --- .../test_nodes/test_sunglasses_effect_node.py | 63 --- .../test_webcam/test_utils/test_buffer.py | 79 ---- .../test_webcam/test_utils/test_event.py | 33 -- .../test_utils/test_image_capture.py | 48 --- .../test_webcam/test_utils/test_message.py | 66 --- .../test_webcam/test_utils/test_misc.py | 70 --- .../test_webcam/test_utils/test_pose.py | 144 ------- .../test_webcam/test_webcam_executor.py | 25 -- 55 files changed, 53 insertions(+), 5318 deletions(-) delete mode 100644 demo/webcam_api_demo.py delete mode 100644 demo/webcam_cfg/human_animal_pose.py delete mode 100644 demo/webcam_cfg/human_pose.py delete mode 100644 demo/webcam_cfg/test_camera.py delete mode 100644 docs/en/webcam_api.rst delete mode 100644 docs/zh_cn/webcam_api.rst delete mode 100644 mmpose/apis/webcam/__init__.py delete mode 100644 mmpose/apis/webcam/nodes/__init__.py delete mode 100644 mmpose/apis/webcam/nodes/base_visualizer_node.py delete mode 100644 mmpose/apis/webcam/nodes/helper_nodes/__init__.py delete mode 100644 mmpose/apis/webcam/nodes/helper_nodes/monitor_node.py delete mode 100644 mmpose/apis/webcam/nodes/helper_nodes/object_assigner_node.py delete mode 100644 mmpose/apis/webcam/nodes/helper_nodes/recorder_node.py delete mode 100644 mmpose/apis/webcam/nodes/model_nodes/__init__.py delete mode 100644 mmpose/apis/webcam/nodes/model_nodes/detector_node.py delete mode 100644 mmpose/apis/webcam/nodes/model_nodes/pose_estimator_node.py delete mode 100644 mmpose/apis/webcam/nodes/node.py delete mode 100644 
mmpose/apis/webcam/nodes/registry.py delete mode 100644 mmpose/apis/webcam/nodes/visualizer_nodes/__init__.py delete mode 100644 mmpose/apis/webcam/nodes/visualizer_nodes/bigeye_effect_node.py delete mode 100644 mmpose/apis/webcam/nodes/visualizer_nodes/notice_board_node.py delete mode 100644 mmpose/apis/webcam/nodes/visualizer_nodes/object_visualizer_node.py delete mode 100644 mmpose/apis/webcam/nodes/visualizer_nodes/sunglasses_effect_node.py delete mode 100644 mmpose/apis/webcam/utils/__init__.py delete mode 100644 mmpose/apis/webcam/utils/buffer.py delete mode 100644 mmpose/apis/webcam/utils/event.py delete mode 100644 mmpose/apis/webcam/utils/image_capture.py delete mode 100644 mmpose/apis/webcam/utils/message.py delete mode 100644 mmpose/apis/webcam/utils/misc.py delete mode 100644 mmpose/apis/webcam/utils/pose.py delete mode 100644 mmpose/apis/webcam/webcam_executor.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_big_eye_effect_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_detector_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_monitor_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_notice_board_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_object_assigner_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_object_visualizer_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_pose_estimator_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_recorder_node.py delete mode 100644 tests/test_apis/test_webcam/test_nodes/test_sunglasses_effect_node.py delete mode 100644 tests/test_apis/test_webcam/test_utils/test_buffer.py delete mode 100644 tests/test_apis/test_webcam/test_utils/test_event.py delete mode 100644 tests/test_apis/test_webcam/test_utils/test_image_capture.py delete mode 100644 tests/test_apis/test_webcam/test_utils/test_message.py delete mode 100644 
tests/test_apis/test_webcam/test_utils/test_misc.py delete mode 100644 tests/test_apis/test_webcam/test_utils/test_pose.py delete mode 100644 tests/test_apis/test_webcam/test_webcam_executor.py diff --git a/demo/docs/en/webcam_api_demo.md b/demo/docs/en/webcam_api_demo.md index 4bbc75c261..9869392171 100644 --- a/demo/docs/en/webcam_api_demo.md +++ b/demo/docs/en/webcam_api_demo.md @@ -1,104 +1,30 @@ ## Webcam Demo -We provide a webcam demo tool which integrartes detection and 2D pose estimation for humans and animals. It can also apply fun effects like putting on sunglasses or enlarging the eyes, based on the pose estimation results. +The original Webcam API has been deprecated starting from version v1.1.0. Users now have the option to utilize either the Inferencer or the demo script for conducting pose estimation using webcam input. -
    -
    -
    +### Webcam Demo with Inferencer -### Get started - -Launch the demo from the mmpose root directory: - -```shell -# Run webcam demo with GPU -python demo/webcam_api_demo.py - -# Run webcam demo with CPU -python demo/webcam_api_demo.py --cpu -``` - -The command above will use the default config file `demo/webcam_cfg/human_pose.py`. You can also specify the config file in the command: +Users can utilize the MMPose Inferencer to estimate human poses in webcam inputs by executing the following command: ```shell -python demo/webcam_api_demo.py --config demo/webcam_cfg/human_pose.py +python demo/inferencer_demo.py webcam --pose2d 'human' ``` -### Hotkeys - -| Hotkey | Function | -| ------ | ------------------------------------- | -| v | Toggle the pose visualization on/off. | -| h | Show help information. | -| m | Show the monitoring information. | -| q | Exit. | - -Note that the demo will automatically save the output video into a file `webcam_api_demo.mp4`. +For additional information about the arguments of Inferencer, please refer to the [Inferencer Documentation](/docs/en/user_guides/inference.md). -### Usage and configuarations +### Webcam Demo with Demo Script -Detailed configurations can be found in the config file. +All of the demo scripts, except for `demo/image_demo.py`, support webcam input. -- **Configure detection models** - Users can choose detection models from the [MMDetection Model Zoo](https://mmdetection.readthedocs.io/en/3.x/model_zoo.html). Just set the `model_config` and `model_checkpoint` in the detector node accordingly, and the model will be automatically downloaded and loaded. +Take `demo/topdown_demo_with_mmdet.py` as example, users can utilize this script with webcam input by specifying **`--input webcam`** in the command: - ```python - # 'DetectorNode': - # This node performs object detection from the frame image using an - # MMDetection model. 
- dict( - type='DetectorNode', - name='detector', - model_config='demo/mmdetection_cfg/' - 'ssdlite_mobilenetv2-scratch_8xb24-600e_coco.py', - model_checkpoint='https://download.openmmlab.com' - '/mmdetection/v2.0/ssd/' - 'ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_' - 'scratch_600e_coco_20210629_110627-974d9307.pth', - input_buffer='_input_', - output_buffer='det_result'), - ``` - -- **Configure pose estimation models** - In this demo we use two [top-down](https://github.com/open-mmlab/mmpose/tree/latest/configs/body_2d_keypoint/topdown_heatmap) pose estimation models for humans and animals respectively. Users can choose models from the [MMPose Model Zoo](https://mmpose.readthedocs.io/en/latest/modelzoo.html). To apply different pose models on different instance types, you can add multiple pose estimator nodes with `cls_names` set accordingly. - - ```python - # 'TopdownPoseEstimatorNode': - # This node performs keypoint detection from the frame image using an - # MMPose top-down model. Detection results is needed. 
- dict( - type='TopdownPoseEstimatorNode', - name='human pose estimator', - model_config='configs/wholebody_2d_keypoint/' - 'topdown_heatmap/coco-wholebody/' - 'td-hm_vipnas-mbv3_dark-8xb64-210e_coco-wholebody-256x192.py', - model_checkpoint='https://download.openmmlab.com/mmpose/' - 'top_down/vipnas/vipnas_mbv3_coco_wholebody_256x192_dark' - '-e2158108_20211205.pth', - labels=['person'], - input_buffer='det_result', - output_buffer='human_pose'), - dict( - type='TopdownPoseEstimatorNode', - name='animal pose estimator', - model_config='configs/animal_2d_keypoint/topdown_heatmap/' - 'animalpose/td-hm_hrnet-w32_8xb64-210e_animalpose-256x256.py', - model_checkpoint='https://download.openmmlab.com/mmpose/animal/' - 'hrnet/hrnet_w32_animalpose_256x256-1aa7f075_20210426.pth', - labels=['cat', 'dog', 'horse', 'sheep', 'cow'], - input_buffer='human_pose', - output_buffer='animal_pose'), - ``` - -- **Run the demo on a local video file** - You can use local video files as the demo input by set `camera_id` to the file path. - -- **The computer doesn't have a camera?** - A smart phone can serve as a webcam via apps like [Camo](https://reincubate.com/camo/) or [DroidCam](https://www.dev47apps.com/). - -- **Test the camera and display** - Run follow command for a quick test of video capturing and displaying. 
- - ```shell - python demo/webcam_api_demo.py --config demo/webcam_cfg/test_camera.py - ``` +```shell +# inference with webcam +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + https://download.openmmlab.com/mmpose/v1/projects/rtmpose/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input webcam \ + --show +``` diff --git a/demo/docs/zh_cn/webcam_api_demo.md b/demo/docs/zh_cn/webcam_api_demo.md index acc1aa9b0a..66099c9ca6 100644 --- a/demo/docs/zh_cn/webcam_api_demo.md +++ b/demo/docs/zh_cn/webcam_api_demo.md @@ -1,109 +1,30 @@ -## Webcam Demo +## 摄像头推理 -我们提供了同时支持人体和动物的识别和 2D 姿态预估 webcam demo 工具,用户也可以用这个脚本在姿态预测结果上加入譬如大眼和戴墨镜等好玩的特效。 +从版本 v1.1.0 开始,原来的摄像头 API 已被弃用。用户现在可以选择使用推理器(Inferencer)或 Demo 脚本从摄像头读取的视频中进行姿势估计。 -
    -
    -
    +### 使用推理器进行摄像头推理 -### Get started - -脚本使用方式很简单,直接在 MMPose 根路径使用: - -```shell -# 使用 GPU -python demo/webcam_api_demo.py - -# 仅使用 CPU -python demo/webcam_api_demo.py --cpu -``` - -该命令会使用默认的 `demo/webcam_cfg/human_pose.py` 作为配置文件,用户可以自行指定别的配置: +用户可以通过执行以下命令来利用 MMPose Inferencer 对摄像头输入进行人体姿势估计: ```shell -python demo/webcam_api_demo.py --config demo/webcam_cfg/human_pose.py +python demo/inferencer_demo.py webcam --pose2d 'human' ``` -### Hotkeys - -| Hotkey | Function | -| ------ | ------------------------------------- | -| v | Toggle the pose visualization on/off. | -| h | Show help information. | -| m | Show the monitoring information. | -| q | Exit. | - -注意:脚本会自动将实时结果保存成一个名为 `webcam_api_demo.mp4` 的视频文件。 - -### 配置使用 - -这里我们只进行一些基本的说明,更多的信息可以直接参考对应的配置文件。 - -- **设置检测模型** +有关推理器的参数详细信息,请参阅 [推理器文档](/docs/en/user_guides/inference.md)。 - 用户可以直接使用 [MMDetection Model Zoo](https://mmdetection.readthedocs.io/en/3.x/model_zoo.html) 里的识别模型,需要注意的是确保配置文件中的 DetectorNode 里的 `model_config` 和 `model_checkpoint` 需要对应起来,这样模型就会被自动下载和加载,例如: +### 使用 Demo 脚本进行摄像头推理 - ```python - # 'DetectorNode': - # This node performs object detection from the frame image using an - # MMDetection model. 
- dict( - type='DetectorNode', - name='detector', - model_config='demo/mmdetection_cfg/' - 'ssdlite_mobilenetv2-scratch_8xb24-600e_coco.py', - model_checkpoint='https://download.openmmlab.com' - '/mmdetection/v2.0/ssd/' - 'ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_' - 'scratch_600e_coco_20210629_110627-974d9307.pth', - input_buffer='_input_', - output_buffer='det_result'), - ``` +除了 `demo/image_demo.py` 之外,所有的 Demo 脚本都支持摄像头输入。 -- **设置姿态预估模型** +以 `demo/topdown_demo_with_mmdet.py` 为例,用户可以通过在命令中指定 **`--input webcam`** 来使用该脚本对摄像头输入进行推理: - 这里我们用两个 [top-down](https://github.com/open-mmlab/mmpose/tree/latest/configs/body_2d_keypoint/topdown_heatmap) 结构的人体和动物姿态预估模型进行演示。用户可以自由使用 [MMPose Model Zoo](https://mmpose.readthedocs.io/zh_CN/latest/model_zoo/body_2d_keypoint.html) 里的模型。需要注意的是,更换模型后用户需要在对应的 pose estimate node 里添加或修改对应的 `cls_names` ,例如: - - ```python - # 'TopdownPoseEstimatorNode': - # This node performs keypoint detection from the frame image using an - # MMPose top-down model. Detection results is needed. 
- dict( - type='TopdownPoseEstimatorNode', - name='human pose estimator', - model_config='configs/wholebody_2d_keypoint/' - 'topdown_heatmap/coco-wholebody/' - 'td-hm_vipnas-mbv3_dark-8xb64-210e_coco-wholebody-256x192.py', - model_checkpoint='https://download.openmmlab.com/mmpose/' - 'top_down/vipnas/vipnas_mbv3_coco_wholebody_256x192_dark' - '-e2158108_20211205.pth', - labels=['person'], - input_buffer='det_result', - output_buffer='human_pose'), - dict( - type='TopdownPoseEstimatorNode', - name='animal pose estimator', - model_config='configs/animal_2d_keypoint/topdown_heatmap/' - 'animalpose/td-hm_hrnet-w32_8xb64-210e_animalpose-256x256.py', - model_checkpoint='https://download.openmmlab.com/mmpose/animal/' - 'hrnet/hrnet_w32_animalpose_256x256-1aa7f075_20210426.pth', - labels=['cat', 'dog', 'horse', 'sheep', 'cow'], - input_buffer='human_pose', - output_buffer='animal_pose'), - ``` - -- **使用本地视频文件** - - 如果想直接使用本地的视频文件,用户只需要把文件路径设置到 `camera_id` 就行。 - -- **本机没有摄像头怎么办** - - 用户可以在自己手机安装上一些 app 就能替代摄像头,例如 [Camo](https://reincubate.com/camo/) 和 [DroidCam](https://www.dev47apps.com/) 。 - -- **测试摄像头和显示器连接** - - 使用如下命令就能完成检测: - - ```shell - python demo/webcam_api_demo.py --config demo/webcam_cfg/test_camera.py - ``` +```shell +# inference with webcam +python demo/topdown_demo_with_mmdet.py \ + projects/rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \ + https://download.openmmlab.com/mmpose/v1/projects/rtmpose/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \ + projects/rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \ + https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \ + --input webcam \ + --show +``` diff --git a/demo/webcam_api_demo.py b/demo/webcam_api_demo.py deleted file mode 100644 index 7d7ad263b1..0000000000 --- a/demo/webcam_api_demo.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
- -import logging -import warnings -from argparse import ArgumentParser - -from mmengine import Config, DictAction - -from mmpose.apis.webcam import WebcamExecutor -from mmpose.apis.webcam.nodes import model_nodes - - -def parse_args(): - parser = ArgumentParser('Webcam executor configs') - parser.add_argument( - '--config', type=str, default='demo/webcam_cfg/human_pose.py') - parser.add_argument( - '--cfg-options', - nargs='+', - action=DictAction, - default={}, - help='Override settings in the config. The key-value pair ' - 'in xxx=yyy format will be merged into config file. For example, ' - "'--cfg-options executor_cfg.camera_id=1'") - parser.add_argument( - '--debug', action='store_true', help='Show debug information.') - parser.add_argument( - '--cpu', action='store_true', help='Use CPU for model inference.') - parser.add_argument( - '--cuda', action='store_true', help='Use GPU for model inference.') - - return parser.parse_args() - - -def set_device(cfg: Config, device: str): - """Set model device in config. - - Args: - cfg (Config): Webcam config - device (str): device indicator like "cpu" or "cuda:0" - """ - - device = device.lower() - assert device == 'cpu' or device.startswith('cuda:') - - for node_cfg in cfg.executor_cfg.nodes: - if node_cfg.type in model_nodes.__all__: - node_cfg.update(device=device) - - return cfg - - -def run(): - - warnings.warn('The Webcam API will be deprecated in future. 
', - DeprecationWarning) - - args = parse_args() - cfg = Config.fromfile(args.config) - cfg.merge_from_dict(args.cfg_options) - - if args.debug: - logging.basicConfig(level=logging.DEBUG) - - if args.cpu: - cfg = set_device(cfg, 'cpu') - - if args.cuda: - cfg = set_device(cfg, 'cuda:0') - - webcam_exe = WebcamExecutor(**cfg.executor_cfg) - webcam_exe.run() - - -if __name__ == '__main__': - run() diff --git a/demo/webcam_cfg/human_animal_pose.py b/demo/webcam_cfg/human_animal_pose.py deleted file mode 100644 index 5eedc7f216..0000000000 --- a/demo/webcam_cfg/human_animal_pose.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -executor_cfg = dict( - # Basic configurations of the executor - name='Pose Estimation', - camera_id=0, - # Define nodes. - # The configuration of a node usually includes: - # 1. 'type': Node class name - # 2. 'name': Node name - # 3. I/O buffers (e.g. 'input_buffer', 'output_buffer'): specify the - # input and output buffer names. This may depend on the node class. - # 4. 'enable_key': assign a hot-key to toggle enable/disable this node. - # This may depend on the node class. - # 5. Other class-specific arguments - nodes=[ - # 'DetectorNode': - # This node performs object detection from the frame image using an - # MMDetection model. - dict( - type='DetectorNode', - name='detector', - model_config='demo/mmdetection_cfg/' - 'ssdlite_mobilenetv2-scratch_8xb24-600e_coco.py', - model_checkpoint='https://download.openmmlab.com' - '/mmdetection/v2.0/ssd/' - 'ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_' - 'scratch_600e_coco_20210629_110627-974d9307.pth', - input_buffer='_input_', # `_input_` is an executor-reserved buffer - output_buffer='det_result'), - # 'TopdownPoseEstimatorNode': - # This node performs keypoint detection from the frame image using an - # MMPose top-down model. Detection results is needed. 
- dict( - type='TopdownPoseEstimatorNode', - name='human pose estimator', - model_config='configs/wholebody_2d_keypoint/' - 'topdown_heatmap/coco-wholebody/' - 'td-hm_vipnas-mbv3_dark-8xb64-210e_coco-wholebody-256x192.py', - model_checkpoint='https://download.openmmlab.com/mmpose/' - 'top_down/vipnas/vipnas_mbv3_coco_wholebody_256x192_dark' - '-e2158108_20211205.pth', - labels=['person'], - input_buffer='det_result', - output_buffer='human_pose'), - dict( - type='TopdownPoseEstimatorNode', - name='animal pose estimator', - model_config='configs/animal_2d_keypoint/topdown_heatmap/' - 'animalpose/td-hm_hrnet-w32_8xb64-210e_animalpose-256x256.py', - model_checkpoint='https://download.openmmlab.com/mmpose/animal/' - 'hrnet/hrnet_w32_animalpose_256x256-1aa7f075_20210426.pth', - labels=['cat', 'dog', 'horse', 'sheep', 'cow'], - input_buffer='human_pose', - output_buffer='animal_pose'), - # 'ObjectAssignerNode': - # This node binds the latest model inference result with the current - # frame. (This means the frame image and inference result may be - # asynchronous). - dict( - type='ObjectAssignerNode', - name='object assigner', - frame_buffer='_frame_', # `_frame_` is an executor-reserved buffer - object_buffer='animal_pose', - output_buffer='frame'), - # 'ObjectVisualizerNode': - # This node draw the pose visualization result in the frame image. - # Pose results is needed. - dict( - type='ObjectVisualizerNode', - name='object visualizer', - enable_key='v', - enable=True, - show_bbox=True, - must_have_keypoint=False, - show_keypoint=True, - input_buffer='frame', - output_buffer='vis'), - # 'SunglassesNode': - # This node draw the sunglasses effect in the frame image. - # Pose results is needed. - dict( - type='SunglassesEffectNode', - name='sunglasses', - enable_key='s', - enable=False, - input_buffer='vis', - output_buffer='vis_sunglasses'), - # 'BigeyeEffectNode': - # This node draw the big-eye effetc in the frame image. - # Pose results is needed. 
- dict( - type='BigeyeEffectNode', - name='big-eye', - enable_key='b', - enable=False, - input_buffer='vis_sunglasses', - output_buffer='vis_bigeye'), - # 'NoticeBoardNode': - # This node show a notice board with given content, e.g. help - # information. - dict( - type='NoticeBoardNode', - name='instruction', - enable_key='h', - enable=True, - input_buffer='vis_bigeye', - output_buffer='vis_notice', - content_lines=[ - 'This is a demo for pose visualization and simple image ' - 'effects. Have fun!', '', 'Hot-keys:', - '"v": Pose estimation result visualization', - '"s": Sunglasses effect B-)', '"b": Big-eye effect 0_0', - '"h": Show help information', - '"m": Show diagnostic information', '"q": Exit' - ], - ), - # 'MonitorNode': - # This node show diagnostic information in the frame image. It can - # be used for debugging or monitoring system resource status. - dict( - type='MonitorNode', - name='monitor', - enable_key='m', - enable=False, - input_buffer='vis_notice', - output_buffer='display'), - # 'RecorderNode': - # This node save the output video into a file. - dict( - type='RecorderNode', - name='recorder', - out_video_file='webcam_api_demo.mp4', - input_buffer='display', - output_buffer='_display_' - # `_display_` is an executor-reserved buffer - ) - ]) diff --git a/demo/webcam_cfg/human_pose.py b/demo/webcam_cfg/human_pose.py deleted file mode 100644 index d1bac5722a..0000000000 --- a/demo/webcam_cfg/human_pose.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -executor_cfg = dict( - # Basic configurations of the executor - name='Pose Estimation', - camera_id=0, - # Define nodes. - # The configuration of a node usually includes: - # 1. 'type': Node class name - # 2. 'name': Node name - # 3. I/O buffers (e.g. 'input_buffer', 'output_buffer'): specify the - # input and output buffer names. This may depend on the node class. - # 4. 'enable_key': assign a hot-key to toggle enable/disable this node. 
- # This may depend on the node class. - # 5. Other class-specific arguments - nodes=[ - # 'DetectorNode': - # This node performs object detection from the frame image using an - # MMDetection model. - dict( - type='DetectorNode', - name='detector', - model_config='projects/rtmpose/rtmdet/person/' - 'rtmdet_nano_320-8xb32_coco-person.py', - model_checkpoint='https://download.openmmlab.com/mmpose/v1/' - 'projects/rtmpose/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth', # noqa - input_buffer='_input_', # `_input_` is an executor-reserved buffer - output_buffer='det_result'), - # 'TopdownPoseEstimatorNode': - # This node performs keypoint detection from the frame image using an - # MMPose top-down model. Detection results is needed. - dict( - type='TopdownPoseEstimatorNode', - name='human pose estimator', - model_config='projects/rtmpose/rtmpose/body_2d_keypoint/' - 'rtmpose-t_8xb256-420e_coco-256x192.py', - model_checkpoint='https://download.openmmlab.com/mmpose/v1/' - 'projects/rtmpose/rtmpose-tiny_simcc-aic-coco_pt-aic-coco_420e-256x192-cfc8f33d_20230126.pth', # noqa - labels=['person'], - input_buffer='det_result', - output_buffer='human_pose'), - # 'ObjectAssignerNode': - # This node binds the latest model inference result with the current - # frame. (This means the frame image and inference result may be - # asynchronous). - dict( - type='ObjectAssignerNode', - name='object assigner', - frame_buffer='_frame_', # `_frame_` is an executor-reserved buffer - object_buffer='human_pose', - output_buffer='frame'), - # 'ObjectVisualizerNode': - # This node draw the pose visualization result in the frame image. - # Pose results is needed. - dict( - type='ObjectVisualizerNode', - name='object visualizer', - enable_key='v', - enable=True, - show_bbox=True, - must_have_keypoint=False, - show_keypoint=True, - input_buffer='frame', - output_buffer='vis'), - # 'NoticeBoardNode': - # This node show a notice board with given content, e.g. help - # information. 
- dict( - type='NoticeBoardNode', - name='instruction', - enable_key='h', - enable=True, - input_buffer='vis', - output_buffer='vis_notice', - content_lines=[ - 'This is a demo for pose visualization and simple image ' - 'effects. Have fun!', '', 'Hot-keys:', - '"v": Pose estimation result visualization', - '"h": Show help information', - '"m": Show diagnostic information', '"q": Exit' - ], - ), - # 'MonitorNode': - # This node show diagnostic information in the frame image. It can - # be used for debugging or monitoring system resource status. - dict( - type='MonitorNode', - name='monitor', - enable_key='m', - enable=False, - input_buffer='vis_notice', - output_buffer='display'), - # 'RecorderNode': - # This node save the output video into a file. - dict( - type='RecorderNode', - name='recorder', - out_video_file='webcam_api_demo.mp4', - input_buffer='display', - output_buffer='_display_' - # `_display_` is an executor-reserved buffer - ) - ]) diff --git a/demo/webcam_cfg/test_camera.py b/demo/webcam_cfg/test_camera.py deleted file mode 100644 index e6d79cf6db..0000000000 --- a/demo/webcam_cfg/test_camera.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -executor_cfg = dict( - name='Test Webcam', - camera_id=0, - camera_max_fps=30, - nodes=[ - dict( - type='MonitorNode', - name='monitor', - enable_key='m', - enable=False, - input_buffer='_frame_', - output_buffer='display'), - # 'RecorderNode': - # This node save the output video into a file. - dict( - type='RecorderNode', - name='recorder', - out_video_file='webcam_api_output.mp4', - input_buffer='display', - output_buffer='_display_') - ]) diff --git a/docs/en/api.rst b/docs/en/api.rst index a75e4a451d..48819a2531 100644 --- a/docs/en/api.rst +++ b/docs/en/api.rst @@ -132,5 +132,3 @@ hooks ^^^^^^^^^^^ .. automodule:: mmpose.engine.hooks :members: - -.. 
include:: webcam_api.rst diff --git a/docs/en/webcam_api.rst b/docs/en/webcam_api.rst deleted file mode 100644 index ff1c127515..0000000000 --- a/docs/en/webcam_api.rst +++ /dev/null @@ -1,112 +0,0 @@ -mmpose.apis.webcam --------------------- -.. contents:: MMPose Webcam API: Tools to build simple interactive webcam applications and demos - :depth: 2 - :local: - :backlinks: top - -Executor -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam -.. autosummary:: - :toctree: generated - :nosignatures: - - WebcamExecutor - -Nodes -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam.nodes - -Base Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - Node - BaseVisualizerNode - -Model Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - DetectorNode - TopdownPoseEstimatorNode - -Visualizer Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - ObjectVisualizerNode - NoticeBoardNode - SunglassesEffectNode - BigeyeEffectNode - -Helper Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - ObjectAssignerNode - MonitorNode - RecorderNode - -Utils -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam.utils - -Buffer and Message -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - BufferManager - Message - FrameMessage - VideoEndingMessage - -Pose -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - get_eye_keypoint_ids - get_face_keypoint_ids - get_hand_keypoint_ids - get_mouth_keypoint_ids - get_wrist_keypoint_ids - -Event -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - EventManager - -Misc -"""""""""""""""""""" -.. 
autosummary:: - :toctree: generated - :nosignatures: - - copy_and_paste - screen_matting - expand_and_clamp - limit_max_fps - is_image_file - get_cached_file_path - load_image_from_disk_or_url - get_config_path diff --git a/docs/zh_cn/api.rst b/docs/zh_cn/api.rst index a75e4a451d..48819a2531 100644 --- a/docs/zh_cn/api.rst +++ b/docs/zh_cn/api.rst @@ -132,5 +132,3 @@ hooks ^^^^^^^^^^^ .. automodule:: mmpose.engine.hooks :members: - -.. include:: webcam_api.rst diff --git a/docs/zh_cn/webcam_api.rst b/docs/zh_cn/webcam_api.rst deleted file mode 100644 index ff1c127515..0000000000 --- a/docs/zh_cn/webcam_api.rst +++ /dev/null @@ -1,112 +0,0 @@ -mmpose.apis.webcam --------------------- -.. contents:: MMPose Webcam API: Tools to build simple interactive webcam applications and demos - :depth: 2 - :local: - :backlinks: top - -Executor -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam -.. autosummary:: - :toctree: generated - :nosignatures: - - WebcamExecutor - -Nodes -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam.nodes - -Base Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - Node - BaseVisualizerNode - -Model Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - DetectorNode - TopdownPoseEstimatorNode - -Visualizer Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - ObjectVisualizerNode - NoticeBoardNode - SunglassesEffectNode - BigeyeEffectNode - -Helper Nodes -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - :template: webcam_node_class.rst - - ObjectAssignerNode - MonitorNode - RecorderNode - -Utils -^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: mmpose.apis.webcam.utils - -Buffer and Message -"""""""""""""""""""" -.. 
autosummary:: - :toctree: generated - :nosignatures: - - BufferManager - Message - FrameMessage - VideoEndingMessage - -Pose -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - get_eye_keypoint_ids - get_face_keypoint_ids - get_hand_keypoint_ids - get_mouth_keypoint_ids - get_wrist_keypoint_ids - -Event -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - EventManager - -Misc -"""""""""""""""""""" -.. autosummary:: - :toctree: generated - :nosignatures: - - copy_and_paste - screen_matting - expand_and_clamp - limit_max_fps - is_image_file - get_cached_file_path - load_image_from_disk_or_url - get_config_path diff --git a/mmpose/apis/webcam/__init__.py b/mmpose/apis/webcam/__init__.py deleted file mode 100644 index 271b238c67..0000000000 --- a/mmpose/apis/webcam/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from .webcam_executor import WebcamExecutor - -__all__ = ['WebcamExecutor'] diff --git a/mmpose/apis/webcam/nodes/__init__.py b/mmpose/apis/webcam/nodes/__init__.py deleted file mode 100644 index 50f7c899d3..0000000000 --- a/mmpose/apis/webcam/nodes/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-from .base_visualizer_node import BaseVisualizerNode -from .helper_nodes import MonitorNode, ObjectAssignerNode, RecorderNode -from .model_nodes import DetectorNode, TopdownPoseEstimatorNode -from .node import Node -from .registry import NODES -from .visualizer_nodes import (BigeyeEffectNode, NoticeBoardNode, - ObjectVisualizerNode, SunglassesEffectNode) - -__all__ = [ - 'BaseVisualizerNode', 'NODES', 'MonitorNode', 'ObjectAssignerNode', - 'RecorderNode', 'DetectorNode', 'TopdownPoseEstimatorNode', 'Node', - 'BigeyeEffectNode', 'NoticeBoardNode', 'ObjectVisualizerNode', - 'ObjectAssignerNode', 'SunglassesEffectNode' -] diff --git a/mmpose/apis/webcam/nodes/base_visualizer_node.py b/mmpose/apis/webcam/nodes/base_visualizer_node.py deleted file mode 100644 index 0e0ba397d4..0000000000 --- a/mmpose/apis/webcam/nodes/base_visualizer_node.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from abc import abstractmethod -from typing import Dict, List, Optional, Union - -import numpy as np - -from ..utils import FrameMessage, Message -from .node import Node - - -class BaseVisualizerNode(Node): - """Base class for nodes whose function is to create visual effects, like - visualizing model predictions, showing graphics or showing text messages. - - All subclass should implement the method ``draw()``. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str | list): The name(s) of the output buffer(s). - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. 
Default: ``True`` - """ - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True): - - super().__init__(name=name, enable_key=enable_key, enable=enable) - - # Register buffers - self.register_input_buffer(input_buffer, 'input', trigger=True) - self.register_output_buffer(output_buffer) - - def process(self, input_msgs: Dict[str, Message]) -> Union[Message, None]: - input_msg = input_msgs['input'] - - img = self.draw(input_msg) - input_msg.set_image(img) - - return input_msg - - def bypass(self, input_msgs: Dict[str, Message]) -> Union[Message, None]: - return input_msgs['input'] - - @abstractmethod - def draw(self, input_msg: FrameMessage) -> np.ndarray: - """Draw on the frame image of the input FrameMessage. - - Args: - input_msg (:obj:`FrameMessage`): The message of the frame to draw - on - - Returns: - np.array: The processed image. - """ diff --git a/mmpose/apis/webcam/nodes/helper_nodes/__init__.py b/mmpose/apis/webcam/nodes/helper_nodes/__init__.py deleted file mode 100644 index 8bb0ed9dd1..0000000000 --- a/mmpose/apis/webcam/nodes/helper_nodes/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from .monitor_node import MonitorNode -from .object_assigner_node import ObjectAssignerNode -from .recorder_node import RecorderNode - -__all__ = ['MonitorNode', 'ObjectAssignerNode', 'RecorderNode'] diff --git a/mmpose/apis/webcam/nodes/helper_nodes/monitor_node.py b/mmpose/apis/webcam/nodes/helper_nodes/monitor_node.py deleted file mode 100644 index 305490dc52..0000000000 --- a/mmpose/apis/webcam/nodes/helper_nodes/monitor_node.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-from typing import Dict, List, Optional, Union - -import cv2 -import numpy as np -from mmcv import color_val - -from ..node import Node -from ..registry import NODES - -try: - import psutil - psutil_proc = psutil.Process() -except (ImportError, ModuleNotFoundError): - psutil_proc = None - - -@NODES.register_module() -class MonitorNode(Node): - """Show diagnostic information. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - x_offset (int): The position of the text box's left border in - pixels. Default: 20 - y_offset (int): The position of the text box's top border in - pixels. Default: 20 - y_delta (int): The line height in pixels. Default: 15 - text_color (str|tuple): The font color represented in a color name or - a BGR tuple. Default: ``'black'`` - backbround_color (str|tuple): The background color represented in a - color name or a BGR tuple. Default: (255, 183, 0) - text_scale (float): The font scale factor that is multiplied by the - base size. Default: 0.4 - ignore_items (list[str], optional): Specify the node information items - that will not be shown. See ``MonitorNode._default_ignore_items`` - for the default setting. - - Example:: - >>> cfg = dict( - ... type='MonitorNode', - ... name='monitor', - ... enable_key='m', - ... enable=False, - ... input_buffer='vis_notice', - ... 
output_buffer='display') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - _default_ignore_items = ['timestamp'] - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = False, - x_offset=20, - y_offset=20, - y_delta=15, - text_color='black', - background_color=(255, 183, 0), - text_scale=0.4, - ignore_items: Optional[List[str]] = None): - super().__init__(name=name, enable_key=enable_key, enable=enable) - - self.x_offset = x_offset - self.y_offset = y_offset - self.y_delta = y_delta - self.text_color = color_val(text_color) - self.background_color = color_val(background_color) - self.text_scale = text_scale - if ignore_items is None: - self.ignore_items = self._default_ignore_items - else: - self.ignore_items = ignore_items - - self.register_input_buffer(input_buffer, 'input', trigger=True) - self.register_output_buffer(output_buffer) - - def process(self, input_msgs): - input_msg = input_msgs['input'] - - input_msg.update_route_info( - node_name='System Info', - node_type='none', - info=self._get_system_info()) - - img = input_msg.get_image() - route_info = input_msg.get_route_info() - img = self._show_route_info(img, route_info) - - input_msg.set_image(img) - return input_msg - - def _get_system_info(self): - """Get the system information including CPU and memory usage. - - Returns: - dict: The system information items. - """ - sys_info = {} - if psutil_proc is not None: - sys_info['CPU(%)'] = psutil_proc.cpu_percent() - sys_info['Memory(%)'] = psutil_proc.memory_percent() - return sys_info - - def _show_route_info(self, img: np.ndarray, - route_info: List[Dict]) -> np.ndarray: - """Show the route information in the frame. - - Args: - img (np.ndarray): The frame image. - route_info (list[dict]): The route information of the frame. - - Returns: - np.ndarray: The processed image. 
- """ - canvas = np.full(img.shape, self.background_color, dtype=img.dtype) - - x = self.x_offset - y = self.y_offset - - max_len = 0 - - def _put_line(line=''): - nonlocal y, max_len - cv2.putText(canvas, line, (x, y), cv2.FONT_HERSHEY_DUPLEX, - self.text_scale, self.text_color, 1) - y += self.y_delta - max_len = max(max_len, len(line)) - - for node_info in route_info: - title = f'{node_info["node"]}({node_info["node_type"]})' - _put_line(title) - for k, v in node_info['info'].items(): - if k in self.ignore_items: - continue - if isinstance(v, float): - v = f'{v:.1f}' - _put_line(f' {k}: {v}') - - x1 = max(0, self.x_offset) - x2 = min(img.shape[1], int(x + max_len * self.text_scale * 20)) - y1 = max(0, self.y_offset - self.y_delta) - y2 = min(img.shape[0], y) - - src1 = canvas[y1:y2, x1:x2] - src2 = img[y1:y2, x1:x2] - img[y1:y2, x1:x2] = cv2.addWeighted(src1, 0.5, src2, 0.5, 0) - - return img - - def bypass(self, input_msgs): - return input_msgs['input'] diff --git a/mmpose/apis/webcam/nodes/helper_nodes/object_assigner_node.py b/mmpose/apis/webcam/nodes/helper_nodes/object_assigner_node.py deleted file mode 100644 index a1a7804ab4..0000000000 --- a/mmpose/apis/webcam/nodes/helper_nodes/object_assigner_node.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import time -from typing import List, Union - -from mmpose.utils.timer import RunningAverage -from ..node import Node -from ..registry import NODES - - -@NODES.register_module() -class ObjectAssignerNode(Node): - """Assign the object information to the frame message. - - :class:`ObjectAssignerNode` enables asynchronous processing of model - inference and video I/O, so the video will be captured and displayed - smoothly regardless of the model inference speed. Specifically, - :class:`ObjectAssignerNode` takes messages from both model branch and - video I/O branch as its input, indicated as "object message" and "frame - message" respectively. 
When an object message arrives it will update the - latest object information; and when a frame message arrives, it will be - assigned with the latest object information and output. - - Specially, if the webcam executor is set to synchrounous mode, the - behavior of :class:`ObjectAssignerNode` will be different: When an object - message arrives, it will trigger an output of itself; and the frame - messages will be ignored. - - Args: - name (str): The node name (also thread name) - frame_buffer (str): Buffer name for frame messages - object_buffer (str): Buffer name for object messages - output_buffer (str): The name(s) of the output buffer(s) - - Example:: - >>> cfg =dict( - ... type='ObjectAssignerNode', - ... name='object assigner', - ... frame_buffer='_frame_', - ... # `_frame_` is an executor-reserved buffer - ... object_buffer='animal_pose', - ... output_buffer='frame') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - def __init__(self, name: str, frame_buffer: str, object_buffer: str, - output_buffer: Union[str, List[str]]): - super().__init__(name=name, enable=True) - self.synchronous = None - - # Cache the latest model result - self.last_object_msg = None - self.last_output_msg = None - - # Inference speed analysis - self.frame_fps = RunningAverage(window=10) - self.frame_lag = RunningAverage(window=10) - self.object_fps = RunningAverage(window=10) - self.object_lag = RunningAverage(window=10) - - # Register buffers - # The trigger buffer depends on the executor.synchronous attribute, - # so it will be set later after the executor is assigned in - # ``set_executor``. 
- self.register_input_buffer(object_buffer, 'object', trigger=False) - self.register_input_buffer(frame_buffer, 'frame', trigger=False) - self.register_output_buffer(output_buffer) - - def set_executor(self, executor): - super().set_executor(executor) - # Set synchronous according to the executor - if executor.synchronous: - self.synchronous = True - trigger = 'object' - else: - self.synchronous = False - trigger = 'frame' - - # Set trigger input buffer according to the synchronous setting - for buffer_info in self._input_buffers: - if buffer_info.input_name == trigger: - buffer_info.trigger = True - - def process(self, input_msgs): - object_msg = input_msgs['object'] - - # Update last result - if object_msg is not None: - # Update result FPS - if self.last_object_msg is not None: - self.object_fps.update( - 1.0 / - (object_msg.timestamp - self.last_object_msg.timestamp)) - # Update inference latency - self.object_lag.update(time.time() - object_msg.timestamp) - # Update last inference result - self.last_object_msg = object_msg - - if not self.synchronous: - # Asynchronous mode: - # Assign the latest object information to the - # current frame. - frame_msg = input_msgs['frame'] - - self.frame_lag.update(time.time() - frame_msg.timestamp) - - # Assign objects to frame - if self.last_object_msg is not None: - frame_msg.update_objects(self.last_object_msg.get_objects()) - frame_msg.merge_route_info( - self.last_object_msg.get_route_info()) - - output_msg = frame_msg - - else: - # Synchronous mode: - # The current frame will be ignored. Instead, - # the frame from which the latest object information is obtained - # will be used. 
- self.frame_lag.update(time.time() - object_msg.timestamp) - output_msg = object_msg - - # Update frame fps and lag - if self.last_output_msg is not None: - self.frame_lag.update(time.time() - output_msg.timestamp) - self.frame_fps.update( - 1.0 / (output_msg.timestamp - self.last_output_msg.timestamp)) - self.last_output_msg = output_msg - - return output_msg - - def _get_node_info(self): - info = super()._get_node_info() - info['object_fps'] = self.object_fps.average() - info['object_lag (ms)'] = self.object_lag.average() * 1000 - info['frame_fps'] = self.frame_fps.average() - info['frame_lag (ms)'] = self.frame_lag.average() * 1000 - return info diff --git a/mmpose/apis/webcam/nodes/helper_nodes/recorder_node.py b/mmpose/apis/webcam/nodes/helper_nodes/recorder_node.py deleted file mode 100644 index b35a778692..0000000000 --- a/mmpose/apis/webcam/nodes/helper_nodes/recorder_node.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from queue import Full, Queue -from threading import Thread -from typing import List, Union - -import cv2 - -from ..node import Node -from ..registry import NODES - - -@NODES.register_module() -class RecorderNode(Node): - """Record the video frames into a local file. - - :class:`RecorderNode` uses OpenCV backend to record the video. Recording - is performed in a separate thread to avoid blocking the data stream. A - buffer queue is used to cached the arrived frame images. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - out_video_file (str): The path of the output video file - out_video_fps (int): The frame rate of the output video. Default: 30 - out_video_codec (str): The codec of the output video. Default: 'mp4v' - buffer_size (int): Size of the buffer queue that caches the arrived - frame images. - enable (bool): Default enable/disable status. Default: ``True``. 
- - Example:: - >>> cfg = dict( - ... type='RecorderNode', - ... name='recorder', - ... out_video_file='webcam_demo.mp4', - ... input_buffer='display', - ... output_buffer='_display_' - ... # `_display_` is an executor-reserved buffer - ... ) - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - def __init__( - self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - out_video_file: str, - out_video_fps: int = 30, - out_video_codec: str = 'mp4v', - buffer_size: int = 30, - enable: bool = True, - ): - super().__init__(name=name, enable_key=None, enable=enable) - - self.queue = Queue(maxsize=buffer_size) - self.out_video_file = out_video_file - self.out_video_fps = out_video_fps - self.out_video_codec = out_video_codec - self.vwriter = None - - # Register buffers - self.register_input_buffer(input_buffer, 'input', trigger=True) - self.register_output_buffer(output_buffer) - - # Start a new thread to write frame - self.t_record = Thread(target=self._record, args=(), daemon=True) - self.t_record.start() - - def process(self, input_msgs): - - input_msg = input_msgs['input'] - img = input_msg.get_image() if input_msg is not None else None - img_queued = False - - while not img_queued: - try: - self.queue.put(img, timeout=1) - img_queued = True - self.logger.info('Recorder received one frame.') - except Full: - self.logger.warn('Recorder jamed!') - - return input_msg - - def _record(self): - """This method is used to create a thread to get frame images from the - buffer queue and write them into the file.""" - - while True: - - img = self.queue.get() - - if img is None: - break - - if self.vwriter is None: - fourcc = cv2.VideoWriter_fourcc(*self.out_video_codec) - fps = self.out_video_fps - frame_size = (img.shape[1], img.shape[0]) - self.vwriter = cv2.VideoWriter(self.out_video_file, fourcc, - fps, frame_size) - assert self.vwriter.isOpened() - - self.vwriter.write(img) - - self.logger.info('Recorder 
released.') - if self.vwriter is not None: - self.vwriter.release() - - def on_exit(self): - try: - # Try putting a None into the output queue so the self.vwriter will - # be released after all queue frames have been written to file. - self.queue.put(None, timeout=1) - self.t_record.join(timeout=1) - except Full: - pass - - if self.t_record.is_alive(): - # Force to release self.vwriter - self.logger.warn('Recorder forced release!') - if self.vwriter is not None: - self.vwriter.release() diff --git a/mmpose/apis/webcam/nodes/model_nodes/__init__.py b/mmpose/apis/webcam/nodes/model_nodes/__init__.py deleted file mode 100644 index a9a116bfec..0000000000 --- a/mmpose/apis/webcam/nodes/model_nodes/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from .detector_node import DetectorNode -from .pose_estimator_node import TopdownPoseEstimatorNode - -__all__ = ['DetectorNode', 'TopdownPoseEstimatorNode'] diff --git a/mmpose/apis/webcam/nodes/model_nodes/detector_node.py b/mmpose/apis/webcam/nodes/model_nodes/detector_node.py deleted file mode 100644 index 350831fe62..0000000000 --- a/mmpose/apis/webcam/nodes/model_nodes/detector_node.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from typing import Dict, List, Optional, Union - -import numpy as np - -from mmpose.utils import adapt_mmdet_pipeline -from ...utils import get_config_path -from ..node import Node -from ..registry import NODES - -try: - from mmdet.apis import inference_detector, init_detector - has_mmdet = True -except (ImportError, ModuleNotFoundError): - has_mmdet = False - - -@NODES.register_module() -class DetectorNode(Node): - """Detect objects from the frame image using MMDetection model. - - Note that MMDetection is required for this node. Please refer to - `MMDetection documentation `_ for the installation guide. 
- - Parameters: - name (str): The node name (also thread name) - model_cfg (str): The model config file - model_checkpoint (str): The model checkpoint file - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - device (str): Specify the device to hold model weights and inference - the model. Default: ``'cuda:0'`` - bbox_thr (float): Set a threshold to filter out objects with low bbox - scores. Default: 0.5 - multi_input (bool): Whether load all frames in input buffer. If True, - all frames in buffer will be loaded and stacked. The latest frame - is used to detect objects of interest. Default: False - - Example:: - >>> cfg = dict( - ... type='DetectorNode', - ... name='detector', - ... model_config='demo/mmdetection_cfg/' - ... 'ssdlite_mobilenetv2_scratch_600e_coco.py', - ... model_checkpoint='https://download.openmmlab.com' - ... '/mmdetection/v2.0/ssd/' - ... 'ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_' - ... 'scratch_600e_coco_20210629_110627-974d9307.pth', - ... # `_input_` is an executor-reserved buffer - ... input_buffer='_input_', - ... 
output_buffer='det_result') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - def __init__(self, - name: str, - model_config: str, - model_checkpoint: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - device: str = 'cuda:0', - bbox_thr: float = 0.5, - multi_input: bool = False): - # Check mmdetection is installed - assert has_mmdet, \ - f'MMDetection is required for {self.__class__.__name__}.' - - super().__init__( - name=name, - enable_key=enable_key, - enable=enable, - multi_input=multi_input) - - self.model_config = get_config_path(model_config, 'mmdet') - self.model_checkpoint = model_checkpoint - self.device = device.lower() - self.bbox_thr = bbox_thr - - # Init model - self.model = init_detector( - self.model_config, self.model_checkpoint, device=self.device) - self.model.cfg = adapt_mmdet_pipeline(self.model.cfg) - - # Register buffers - self.register_input_buffer(input_buffer, 'input', trigger=True) - self.register_output_buffer(output_buffer) - - def bypass(self, input_msgs): - return input_msgs['input'] - - def process(self, input_msgs): - input_msg = input_msgs['input'] - - if self.multi_input: - imgs = [frame.get_image() for frame in input_msg] - input_msg = input_msg[-1] - - img = input_msg.get_image() - - preds = inference_detector(self.model, img) - objects = self._post_process(preds) - input_msg.update_objects(objects) - - if self.multi_input: - input_msg.set_image(np.stack(imgs, axis=0)) - - return input_msg - - def _post_process(self, preds) -> List[Dict]: - """Post-process the predictions of MMDetection model.""" - instances = preds.pred_instances.cpu().numpy() - - classes = self.model.dataset_meta['classes'] - if isinstance(classes, str): - classes = (classes, ) - - objects = [] - for i in range(len(instances)): - if instances.scores[i] < self.bbox_thr: - continue - class_id = instances.labels[i] - obj = { - 
'class_id': class_id, - 'label': classes[class_id], - 'bbox': instances.bboxes[i], - 'det_model_cfg': self.model.cfg, - 'dataset_meta': self.model.dataset_meta.copy(), - } - objects.append(obj) - return objects diff --git a/mmpose/apis/webcam/nodes/model_nodes/pose_estimator_node.py b/mmpose/apis/webcam/nodes/model_nodes/pose_estimator_node.py deleted file mode 100644 index 64691cf560..0000000000 --- a/mmpose/apis/webcam/nodes/model_nodes/pose_estimator_node.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from dataclasses import dataclass -from typing import List, Optional, Union - -import numpy as np - -from mmpose.apis import inference_topdown, init_model -from ...utils import get_config_path -from ..node import Node -from ..registry import NODES - - -@dataclass -class TrackInfo: - """Dataclass for object tracking information.""" - next_id: int = 0 - last_objects: List = None - - -@NODES.register_module() -class TopdownPoseEstimatorNode(Node): - """Perform top-down pose estimation using MMPose model. - - The node should be placed after an object detection node. - - Parameters: - name (str): The node name (also thread name) - model_cfg (str): The model config file - model_checkpoint (str): The model checkpoint file - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - device (str): Specify the device to hold model weights and inference - the model. 
Default: ``'cuda:0'`` - class_ids (list[int], optional): Specify the object category indices - to apply pose estimation. If both ``class_ids`` and ``labels`` - are given, ``labels`` will be ignored. If neither is given, pose - estimation will be applied for all objects. Default: ``None`` - labels (list[str], optional): Specify the object category names to - apply pose estimation. See also ``class_ids``. Default: ``None`` - bbox_thr (float): Set a threshold to filter out objects with low bbox - scores. Default: 0.5 - - Example:: - >>> cfg = dict( - ... type='TopdownPoseEstimatorNode', - ... name='human pose estimator', - ... model_config='configs/wholebody/2d_kpt_sview_rgb_img/' - ... 'topdown_heatmap/coco-wholebody/' - ... 'vipnas_mbv3_coco_wholebody_256x192_dark.py', - ... model_checkpoint='https://download.openmmlab.com/mmpose/' - ... 'top_down/vipnas/vipnas_mbv3_coco_wholebody_256x192_dark' - ... '-e2158108_20211205.pth', - ... labels=['person'], - ... input_buffer='det_result', - ... 
output_buffer='human_pose') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - def __init__(self, - name: str, - model_config: str, - model_checkpoint: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - device: str = 'cuda:0', - class_ids: Optional[List[int]] = None, - labels: Optional[List[str]] = None, - bbox_thr: float = 0.5): - super().__init__(name=name, enable_key=enable_key, enable=enable) - - # Init model - self.model_config = get_config_path(model_config, 'mmpose') - self.model_checkpoint = model_checkpoint - self.device = device.lower() - - self.class_ids = class_ids - self.labels = labels - self.bbox_thr = bbox_thr - - # Init model - self.model = init_model( - self.model_config, self.model_checkpoint, device=self.device) - - # Register buffers - self.register_input_buffer(input_buffer, 'input', trigger=True) - self.register_output_buffer(output_buffer) - - def bypass(self, input_msgs): - return input_msgs['input'] - - def process(self, input_msgs): - - input_msg = input_msgs['input'] - img = input_msg.get_image() - - if self.class_ids: - objects = input_msg.get_objects( - lambda x: x.get('class_id') in self.class_ids) - elif self.labels: - objects = input_msg.get_objects( - lambda x: x.get('label') in self.labels) - else: - objects = input_msg.get_objects() - - if len(objects) > 0: - # Inference pose - bboxes = np.stack([object['bbox'] for object in objects]) - pose_results = inference_topdown(self.model, img, bboxes) - - # Update objects - for pose_result, object in zip(pose_results, objects): - pred_instances = pose_result.pred_instances - object['keypoints'] = pred_instances.keypoints[0] - object['keypoint_scores'] = pred_instances.keypoint_scores[0] - - dataset_meta = self.model.dataset_meta.copy() - dataset_meta.update(object.get('dataset_meta', dict())) - object['dataset_meta'] = dataset_meta - object['pose_model_cfg'] = 
self.model.cfg - - input_msg.update_objects(objects) - - return input_msg diff --git a/mmpose/apis/webcam/nodes/node.py b/mmpose/apis/webcam/nodes/node.py deleted file mode 100644 index 3d34ae1cc0..0000000000 --- a/mmpose/apis/webcam/nodes/node.py +++ /dev/null @@ -1,407 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import logging -import time -from abc import ABCMeta, abstractmethod -from dataclasses import dataclass -from threading import Thread -from typing import Callable, Dict, List, Optional, Tuple, Union - -from mmengine import is_method_overridden - -from mmpose.utils import StopWatch -from ..utils import Message, VideoEndingMessage, limit_max_fps - - -@dataclass -class BufferInfo(): - """Dataclass for buffer information.""" - buffer_name: str - input_name: Optional[str] = None - trigger: bool = False - - -@dataclass -class EventInfo(): - """Dataclass for event handler information.""" - event_name: str - is_keyboard: bool = False - handler_func: Optional[Callable] = None - - -class Node(Thread, metaclass=ABCMeta): - """Base class for node, which is the interface of basic function module. - - :class:`Node` inherits :class:`threading.Thread`. All subclasses should - override following methods: - - - ``process()`` - - ``bypass()`` (optional) - - - Parameters: - name (str): The node name (also thread name) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - max_fps (int): Maximum FPS of the node. This is to avoid the node - running unrestrictedly and causing large resource consuming. 
- Default: 30 - input_check_interval (float): Minimum interval (in millisecond) between - checking if input is ready. Default: 0.001 - enable (bool): Default enable/disable status. Default: ``True`` - daemon (bool): Whether node is a daemon. Default: ``True`` - multi_input (bool): Whether load all messages in buffer. If False, - only one message will be loaded each time. Default: ``False`` - """ - - def __init__(self, - name: str, - enable_key: Optional[Union[str, int]] = None, - max_fps: int = 30, - input_check_interval: float = 0.01, - enable: bool = True, - daemon: bool = False, - multi_input: bool = False): - super().__init__(name=name, daemon=daemon) - self._executor = None - self._enabled = enable - self.enable_key = enable_key - self.max_fps = max_fps - self.input_check_interval = input_check_interval - self.multi_input = multi_input - - # A partitioned buffer manager the executor's buffer manager that - # only accesses the buffers related to the node - self._buffer_manager = None - - # Input/output buffers are a list of registered buffers' information - self._input_buffers = [] - self._output_buffers = [] - - # Event manager is a copy of assigned executor's event manager - self._event_manager = None - - # A list of registered event information - # See register_event() for more information - # Note that we recommend to handle events in nodes by registering - # handlers, but one can still access the raw event by _event_manager - self._registered_events = [] - - # A list of (listener_threads, event_info) - # See set_executor() for more information - self._event_listener_threads = [] - - # A timer to calculate node FPS - self._timer = StopWatch(window=10) - - # Register enable toggle key - if self.enable_key: - # If the node allows toggling enable, it should override the - # `bypass` method to define the node behavior when disabled. 
- if not is_method_overridden('bypass', Node, self.__class__): - raise NotImplementedError( - f'The node {self.__class__} does not support toggling' - 'enable but got argument `enable_key`. To support toggling' - 'enable, please override the `bypass` method of the node.') - - self.register_event( - event_name=self.enable_key, - is_keyboard=True, - handler_func=self._toggle_enable, - ) - - # Logger - self.logger = logging.getLogger(f'Node "{self.name}"') - - @property - def registered_buffers(self): - return self._input_buffers + self._output_buffers - - @property - def registered_events(self): - return self._registered_events.copy() - - def _toggle_enable(self): - self._enabled = not self._enabled - - def register_input_buffer(self, - buffer_name: str, - input_name: str, - trigger: bool = False): - """Register an input buffer, so that Node can automatically check if - data is ready, fetch data from the buffers and format the inputs to - feed into `process` method. - - The subclass of Node should invoke `register_input_buffer` in its - `__init__` method. This method can be invoked multiple times to - register multiple input buffers. - - Args: - buffer_name (str): The name of the buffer - input_name (str): The name of the fetched message from the - corresponding buffer - trigger (bool): An trigger input means the node will wait - until the input is ready before processing. Otherwise, an - inessential input will not block the processing, instead - a None will be fetched if the buffer is not ready. - """ - buffer_info = BufferInfo(buffer_name, input_name, trigger) - self._input_buffers.append(buffer_info) - - def register_output_buffer(self, buffer_name: Union[str, List[str]]): - """Register one or multiple output buffers, so that the Node can - automatically send the output of the `process` method to these buffers. - - The subclass of Node should invoke `register_output_buffer` in its - `__init__` method. 
- - Args: - buffer_name (str|list): The name(s) of the output buffer(s). - """ - - if not isinstance(buffer_name, list): - buffer_name = [buffer_name] - - for name in buffer_name: - buffer_info = BufferInfo(name) - self._output_buffers.append(buffer_info) - - def register_event(self, - event_name: str, - is_keyboard: bool = False, - handler_func: Optional[Callable] = None): - """Register an event. All events used in the node need to be registered - in __init__(). If a callable handler is given, a thread will be create - to listen and handle the event when the node starts. - - Args: - Args: - event_name (str|int): The event name. If is_keyboard==True, - event_name should be a str (as char) or an int (as ascii) - is_keyboard (bool): Indicate whether it is an keyboard - event. If True, the argument event_name will be regarded as a - key indicator. - handler_func (callable, optional): The event handler function, - which should be a collable object with no arguments or - return values. Default: ``None``. - """ - event_info = EventInfo(event_name, is_keyboard, handler_func) - self._registered_events.append(event_info) - - def set_executor(self, executor): - """Assign the node to an executor so the node can access the buffers - and event manager of the executor. - - This method should be invoked by the executor instance. - - Args: - executor (:obj:`WebcamExecutor`): The executor to hold the node - """ - # Get partitioned buffer manager - buffer_names = [ - buffer.buffer_name - for buffer in self._input_buffers + self._output_buffers - ] - self._buffer_manager = executor.buffer_manager.get_sub_manager( - buffer_names) - - # Get event manager - self._event_manager = executor.event_manager - - def _get_input_from_buffer(self) -> Tuple[bool, Optional[Dict]]: - """Get and pack input data. - - The function returns a tuple (status, data). 
If the trigger buffers - are ready, the status flag will be True, and the packed data is a dict - whose items are buffer names and corresponding messages (unready - non-trigger buffers will give a `None`). Otherwise, the status flag is - False and the packed data is None. - - Returns: - tuple[bool, dict]: The first item is a bool value indicating - whether input is ready (i.e., all tirgger buffers are ready). The - second value is a dict of buffer names and messages. - """ - buffer_manager = self._buffer_manager - - if buffer_manager is None: - raise ValueError(f'Node "{self.name}": not set to an executor.') - - # Check that trigger buffers are ready - for buffer_info in self._input_buffers: - if buffer_info.trigger and buffer_manager.is_empty( - buffer_info.buffer_name): - return False, None - - # Default input - result = { - buffer_info.input_name: None - for buffer_info in self._input_buffers - } - - for buffer_info in self._input_buffers: - - while not buffer_manager.is_empty(buffer_info.buffer_name): - msg = buffer_manager.get(buffer_info.buffer_name, block=False) - if self.multi_input: - if result[buffer_info.input_name] is None: - result[buffer_info.input_name] = [] - result[buffer_info.input_name].append(msg) - else: - result[buffer_info.input_name] = msg - break - - # Return unsuccessful flag if any trigger input is unready - if buffer_info.trigger and result[buffer_info.input_name] is None: - return False, None - - return True, result - - def _send_output_to_buffers(self, output_msg): - """Send output of ``process()`` to the registered output buffers. - - Args: - output_msg (Message): output message - """ - for buffer_info in self._output_buffers: - buffer_name = buffer_info.buffer_name - self._buffer_manager.put_force(buffer_name, output_msg) - - @abstractmethod - def process(self, input_msgs: Dict[str, Message]) -> Union[Message, None]: - """The method that implements the function of the node. 
- - This method will be invoked when the node is enabled and the input - data is ready. All subclasses of Node should override this method. - - Args: - input_msgs (dict[str, :obj:`Message`]): The input data collected - from the buffers. For each item, the key is the `input_name` - of the registered input buffer, and the value is a Message - instance fetched from the buffer (or None if the buffer is - non-trigger and not ready). - - Returns: - Message: The output message of the node which will be send to all - registered output buffers. - """ - - def bypass(self, input_msgs: Dict[str, Message]) -> Union[Message, None]: - """The method that defines the node behavior when disabled. - - Note that a node must override this method if it has `enable_key`. - This method has the same signature as ``process()``. - - Args: - input_msgs (dict[str, :obj:`Message`]): The input data collected - from the buffers. For each item, the key is the `input_name` - of the registered input buffer, and the value is a Message - instance fetched from the buffer (or None if the buffer is - non-trigger and not ready). - - Returns: - Message: The output message of the node which will be send to all - registered output buffers. - """ - raise NotImplementedError - - def _get_node_info(self) -> Dict: - """Get route information of the node. - - Default information includes: - - ``'fps'``: The processing speed of the node - - ``'timestamp'``: The time that this method is invoked - - Subclasses can override this method to customize the node information. - - Returns: - dict: The items of node information - """ - info = {'fps': self._timer.report('_FPS_'), 'timestamp': time.time()} - return info - - def on_exit(self): - """This method will be invoked on event `_exit_`. - - Subclasses should override this method to specifying the exiting - behavior. - """ - - def run(self): - """Method representing the Node's activity. - - This method override the standard ``run()`` method of Thread. 
- Subclasses of :class:`Node` should not override this method in - subclasses. - """ - - self.logger.info('Process starts.') - - # Create event listener threads - for event_info in self._registered_events: - - if event_info.handler_func is None: - continue - - def event_listener(): - while True: - with self._event_manager.wait_and_handle( - event_info.event_name, event_info.is_keyboard): - event_info.handler_func() - - t_listener = Thread(target=event_listener, args=(), daemon=True) - t_listener.start() - self._event_listener_threads.append(t_listener) - - # Loop - while True: - # Exit - if self._event_manager.is_set('_exit_'): - self.on_exit() - break - - # Check if input is ready - input_status, input_msgs = self._get_input_from_buffer() - - # Input is not ready - if not input_status: - time.sleep(self.input_check_interval) - continue - - # If a VideoEndingMessage is received, broadcast the signal - # without invoking process() or bypass() - video_ending = False - for _, msg in input_msgs.items(): - if isinstance(msg, VideoEndingMessage): - self._send_output_to_buffers(msg) - video_ending = True - break - - if video_ending: - self.on_exit() - break - - # Check if enabled - if not self._enabled: - # Override bypass method to define node behavior when disabled - output_msg = self.bypass(input_msgs) - else: - with self._timer.timeit(): - with limit_max_fps(self.max_fps): - # Process - output_msg = self.process(input_msgs) - - if output_msg: - # Update route information - node_info = self._get_node_info() - output_msg.update_route_info(node=self, info=node_info) - - # Send output message - if output_msg is not None: - self._send_output_to_buffers(output_msg) - - self.logger.info('Process ends.') diff --git a/mmpose/apis/webcam/nodes/registry.py b/mmpose/apis/webcam/nodes/registry.py deleted file mode 100644 index 06d39fed63..0000000000 --- a/mmpose/apis/webcam/nodes/registry.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-from mmengine.registry import Registry - -NODES = Registry('node') diff --git a/mmpose/apis/webcam/nodes/visualizer_nodes/__init__.py b/mmpose/apis/webcam/nodes/visualizer_nodes/__init__.py deleted file mode 100644 index fad7e30376..0000000000 --- a/mmpose/apis/webcam/nodes/visualizer_nodes/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from .bigeye_effect_node import BigeyeEffectNode -from .notice_board_node import NoticeBoardNode -from .object_visualizer_node import ObjectVisualizerNode -from .sunglasses_effect_node import SunglassesEffectNode - -__all__ = [ - 'ObjectVisualizerNode', 'NoticeBoardNode', 'SunglassesEffectNode', - 'BigeyeEffectNode' -] diff --git a/mmpose/apis/webcam/nodes/visualizer_nodes/bigeye_effect_node.py b/mmpose/apis/webcam/nodes/visualizer_nodes/bigeye_effect_node.py deleted file mode 100644 index 3bbec3d670..0000000000 --- a/mmpose/apis/webcam/nodes/visualizer_nodes/bigeye_effect_node.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from itertools import groupby -from typing import Dict, List, Optional, Union - -import cv2 -import numpy as np - -from ...utils import get_eye_keypoint_ids -from ..base_visualizer_node import BaseVisualizerNode -from ..registry import NODES - - -@NODES.register_module() -class BigeyeEffectNode(BaseVisualizerNode): - """Apply big-eye effect to the objects with eye keypoints in the frame. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. 
For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - kpt_thr (float): The score threshold of valid keypoints. Default: 0.5 - - Example:: - >>> cfg = dict( - ... type='SunglassesEffectNode', - ... name='sunglasses', - ... enable_key='s', - ... enable=False, - ... input_buffer='vis', - ... output_buffer='vis_sunglasses') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - kpt_thr: float = 0.5): - - super().__init__( - name=name, - input_buffer=input_buffer, - output_buffer=output_buffer, - enable_key=enable_key, - enable=enable) - self.kpt_thr = kpt_thr - - def draw(self, input_msg): - canvas = input_msg.get_image() - - objects = input_msg.get_objects(lambda x: - ('keypoints' in x and 'bbox' in x)) - - for dataset_meta, group in groupby(objects, - lambda x: x['dataset_meta']): - left_eye_index, right_eye_index = get_eye_keypoint_ids( - dataset_meta) - canvas = self.apply_bigeye_effect(canvas, group, left_eye_index, - right_eye_index) - return canvas - - def apply_bigeye_effect(self, canvas: np.ndarray, objects: List[Dict], - left_eye_index: int, - right_eye_index: int) -> np.ndarray: - """Apply big-eye effect. - - Args: - canvas (np.ndarray): The image to apply the effect - objects (list[dict]): The object list with bbox and keypoints - - "bbox" ([K, 4(or 5)]): bbox in [x1, y1, x2, y2, (score)] - - "keypoints" ([K,3]): keypoints in [x, y, score] - left_eye_index (int): Keypoint index of left eye - right_eye_index (int): Keypoint index of right eye - - Returns: - np.ndarray: Processed image. 
- """ - - xx, yy = np.meshgrid( - np.arange(canvas.shape[1]), np.arange(canvas.shape[0])) - xx = xx.astype(np.float32) - yy = yy.astype(np.float32) - - for obj in objects: - bbox = obj['bbox'] - kpts = obj['keypoints'] - kpt_scores = obj['keypoint_scores'] - - if kpt_scores[left_eye_index] < self.kpt_thr or kpt_scores[ - right_eye_index] < self.kpt_thr: - continue - - kpt_leye = kpts[left_eye_index, :2] - kpt_reye = kpts[right_eye_index, :2] - for xc, yc in [kpt_leye, kpt_reye]: - - # distortion parameters - k1 = 0.001 - epe = 1e-5 - - scale = (bbox[2] - bbox[0])**2 + (bbox[3] - bbox[1])**2 - r2 = ((xx - xc)**2 + (yy - yc)**2) - r2 = (r2 + epe) / scale # normalized by bbox scale - - xx = (xx - xc) / (1 + k1 / r2) + xc - yy = (yy - yc) / (1 + k1 / r2) + yc - - canvas = cv2.remap( - canvas, - xx, - yy, - interpolation=cv2.INTER_AREA, - borderMode=cv2.BORDER_REPLICATE) - - return canvas diff --git a/mmpose/apis/webcam/nodes/visualizer_nodes/notice_board_node.py b/mmpose/apis/webcam/nodes/visualizer_nodes/notice_board_node.py deleted file mode 100644 index 0578ec38eb..0000000000 --- a/mmpose/apis/webcam/nodes/visualizer_nodes/notice_board_node.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from typing import List, Optional, Tuple, Union - -import cv2 -import numpy as np -from mmcv import color_val - -from ...utils import FrameMessage -from ..base_visualizer_node import BaseVisualizerNode -from ..registry import NODES - - -@NODES.register_module() -class NoticeBoardNode(BaseVisualizerNode): - """Show text messages in the frame. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. 
Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - content_lines (list[str], optional): The lines of text message to show - in the frame. If not given, a default message will be shown. - Default: ``None`` - x_offset (int): The position of the notice board's left border in - pixels. Default: 20 - y_offset (int): The position of the notice board's top border in - pixels. Default: 20 - y_delta (int): The line height in pixels. Default: 15 - text_color (str|tuple): The font color represented in a color name or - a BGR tuple. Default: ``'black'`` - backbround_color (str|tuple): The background color represented in a - color name or a BGR tuple. Default: (255, 183, 0) - text_scale (float): The font scale factor that is multiplied by the - base size. Default: 0.4 - - Example:: - >>> cfg = dict( - ... type='NoticeBoardNode', - ... name='instruction', - ... enable_key='h', - ... enable=True, - ... input_buffer='vis_bigeye', - ... output_buffer='vis_notice', - ... content_lines=[ - ... 'This is a demo for pose visualization and simple image ' - ... 'effects. Have fun!', '', 'Hot-keys:', - ... '"v": Pose estimation result visualization', - ... '"s": Sunglasses effect B-)', '"b": Big-eye effect 0_0', - ... '"h": Show help information', - ... '"m": Show diagnostic information', '"q": Exit' - ... ], - ... 
) - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - default_content_lines = ['This is a notice board!'] - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - content_lines: Optional[List[str]] = None, - x_offset: int = 20, - y_offset: int = 20, - y_delta: int = 15, - text_color: Union[str, Tuple[int, int, int]] = 'black', - background_color: Union[str, Tuple[int, int, - int]] = (255, 183, 0), - text_scale: float = 0.4): - super().__init__( - name=name, - input_buffer=input_buffer, - output_buffer=output_buffer, - enable_key=enable_key, - enable=enable) - - self.x_offset = x_offset - self.y_offset = y_offset - self.y_delta = y_delta - self.text_color = color_val(text_color) - self.background_color = color_val(background_color) - self.text_scale = text_scale - - if content_lines: - self.content_lines = content_lines - else: - self.content_lines = self.default_content_lines - - def draw(self, input_msg: FrameMessage) -> np.ndarray: - img = input_msg.get_image() - canvas = np.full(img.shape, self.background_color, dtype=img.dtype) - - x = self.x_offset - y = self.y_offset - - max_len = max([len(line) for line in self.content_lines]) - - def _put_line(line=''): - nonlocal y - cv2.putText(canvas, line, (x, y), cv2.FONT_HERSHEY_DUPLEX, - self.text_scale, self.text_color, 1) - y += self.y_delta - - for line in self.content_lines: - _put_line(line) - - x1 = max(0, self.x_offset) - x2 = min(img.shape[1], int(x + max_len * self.text_scale * 20)) - y1 = max(0, self.y_offset - self.y_delta) - y2 = min(img.shape[0], y) - - src1 = canvas[y1:y2, x1:x2] - src2 = img[y1:y2, x1:x2] - img[y1:y2, x1:x2] = cv2.addWeighted(src1, 0.5, src2, 0.5, 0) - - return img diff --git a/mmpose/apis/webcam/nodes/visualizer_nodes/object_visualizer_node.py b/mmpose/apis/webcam/nodes/visualizer_nodes/object_visualizer_node.py deleted file mode 
100644 index ef28a0804c..0000000000 --- a/mmpose/apis/webcam/nodes/visualizer_nodes/object_visualizer_node.py +++ /dev/null @@ -1,341 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import math -from itertools import groupby -from typing import Dict, List, Optional, Tuple, Union - -import cv2 -import mmcv -import numpy as np - -from ...utils import FrameMessage -from ..base_visualizer_node import BaseVisualizerNode -from ..registry import NODES - - -def imshow_bboxes(img, - bboxes, - labels=None, - colors='green', - text_color='white', - thickness=1, - font_scale=0.5): - """Draw bboxes with labels (optional) on an image. This is a wrapper of - mmcv.imshow_bboxes. - - Args: - img (str or ndarray): The image to be displayed. - bboxes (ndarray): ndarray of shape (k, 4), each row is a bbox in - format [x1, y1, x2, y2]. - labels (str or list[str], optional): labels of each bbox. - colors (list[str or tuple or :obj:`Color`]): A list of colors. - text_color (str or tuple or :obj:`Color`): Color of texts. - thickness (int): Thickness of lines. - font_scale (float): Font scales of texts. - - Returns: - ndarray: The image with bboxes drawn on it. 
- """ - - # adapt to mmcv.imshow_bboxes input format - bboxes = np.split( - bboxes, bboxes.shape[0], axis=0) if bboxes.shape[0] > 0 else [] - if not isinstance(colors, list): - colors = [colors for _ in range(len(bboxes))] - colors = [mmcv.color_val(c) for c in colors] - assert len(bboxes) == len(colors) - - img = mmcv.imshow_bboxes( - img, - bboxes, - colors, - top_k=-1, - thickness=thickness, - show=False, - out_file=None) - - if labels is not None: - if not isinstance(labels, list): - labels = [labels for _ in range(len(bboxes))] - assert len(labels) == len(bboxes) - - for bbox, label, color in zip(bboxes, labels, colors): - if label is None: - continue - bbox_int = bbox[0, :4].astype(np.int32) - # roughly estimate the proper font size - text_size, text_baseline = cv2.getTextSize(label, - cv2.FONT_HERSHEY_DUPLEX, - font_scale, thickness) - text_x1 = bbox_int[0] - text_y1 = max(0, bbox_int[1] - text_size[1] - text_baseline) - text_x2 = bbox_int[0] + text_size[0] - text_y2 = text_y1 + text_size[1] + text_baseline - cv2.rectangle(img, (text_x1, text_y1), (text_x2, text_y2), color, - cv2.FILLED) - cv2.putText(img, label, (text_x1, text_y2 - text_baseline), - cv2.FONT_HERSHEY_DUPLEX, font_scale, - mmcv.color_val(text_color), thickness) - - return img - - -def imshow_keypoints(img, - pose_result, - skeleton=None, - kpt_score_thr=0.3, - pose_kpt_color=None, - pose_link_color=None, - radius=4, - thickness=1, - show_keypoint_weight=False): - """Draw keypoints and links on an image. - - Args: - img (str or Tensor): The image to draw poses on. If an image array - is given, id will be modified in-place. - pose_result (list[kpts]): The poses to draw. Each element kpts is - a set of K keypoints as an Kx3 numpy.ndarray, where each - keypoint is represented as x, y, score. - kpt_score_thr (float, optional): Minimum score of keypoints - to be shown. Default: 0.3. - pose_kpt_color (np.array[Nx3]`): Color of N keypoints. If None, - the keypoint will not be drawn. 
- pose_link_color (np.array[Mx3]): Color of M links. If None, the - links will not be drawn. - thickness (int): Thickness of lines. - """ - - img = mmcv.imread(img) - img_h, img_w, _ = img.shape - - for kpts in pose_result: - - kpts = np.array(kpts, copy=False) - - # draw each point on image - if pose_kpt_color is not None: - assert len(pose_kpt_color) == len(kpts) - - for kid, kpt in enumerate(kpts): - x_coord, y_coord, kpt_score = int(kpt[0]), int(kpt[1]), kpt[2] - - if kpt_score < kpt_score_thr or pose_kpt_color[kid] is None: - # skip the point that should not be drawn - continue - - color = tuple(int(c) for c in pose_kpt_color[kid]) - if show_keypoint_weight: - img_copy = img.copy() - cv2.circle(img_copy, (int(x_coord), int(y_coord)), radius, - color, -1) - transparency = max(0, min(1, kpt_score)) - cv2.addWeighted( - img_copy, - transparency, - img, - 1 - transparency, - 0, - dst=img) - else: - cv2.circle(img, (int(x_coord), int(y_coord)), radius, - color, -1) - - # draw links - if skeleton is not None and pose_link_color is not None: - assert len(pose_link_color) == len(skeleton) - - for sk_id, sk in enumerate(skeleton): - pos1 = (int(kpts[sk[0], 0]), int(kpts[sk[0], 1])) - pos2 = (int(kpts[sk[1], 0]), int(kpts[sk[1], 1])) - - if (pos1[0] <= 0 or pos1[0] >= img_w or pos1[1] <= 0 - or pos1[1] >= img_h or pos2[0] <= 0 or pos2[0] >= img_w - or pos2[1] <= 0 or pos2[1] >= img_h - or kpts[sk[0], 2] < kpt_score_thr - or kpts[sk[1], 2] < kpt_score_thr - or pose_link_color[sk_id] is None): - # skip the link that should not be drawn - continue - color = tuple(int(c) for c in pose_link_color[sk_id]) - if show_keypoint_weight: - img_copy = img.copy() - X = (pos1[0], pos2[0]) - Y = (pos1[1], pos2[1]) - mX = np.mean(X) - mY = np.mean(Y) - length = ((Y[0] - Y[1])**2 + (X[0] - X[1])**2)**0.5 - angle = math.degrees(math.atan2(Y[0] - Y[1], X[0] - X[1])) - stickwidth = 2 - polygon = cv2.ellipse2Poly( - (int(mX), int(mY)), (int(length / 2), int(stickwidth)), - int(angle), 0, 
360, 1) - cv2.fillConvexPoly(img_copy, polygon, color) - transparency = max( - 0, min(1, 0.5 * (kpts[sk[0], 2] + kpts[sk[1], 2]))) - cv2.addWeighted( - img_copy, - transparency, - img, - 1 - transparency, - 0, - dst=img) - else: - cv2.line(img, pos1, pos2, color, thickness=thickness) - - return img - - -@NODES.register_module() -class ObjectVisualizerNode(BaseVisualizerNode): - """Visualize the bounding box and keypoints of objects. - - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: (1) If ``enable_key`` is set, - the ``bypass()`` method need to be overridden to define the node - behavior when disabled; (2) Some hot-keys are reserved for - particular use. For example: 'q', 'Q' and 27 are used for exiting. - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True`` - show_bbox (bool): Set ``True`` to show the bboxes of detection - objects. Default: ``True`` - show_keypoint (bool): Set ``True`` to show the pose estimation - results. Default: ``True`` - must_have_bbox (bool): Only show objects with keypoints. - Default: ``False`` - kpt_thr (float): The threshold of keypoint score. Default: 0.3 - radius (int): The radius of keypoint. Default: 4 - thickness (int): The thickness of skeleton. Default: 2 - bbox_color (str|tuple|dict): The color of bboxes. If a single color is - given (a str like 'green' or a BGR tuple like (0, 255, 0)), it - will be used for all bboxes. If a dict is given, it will be used - as a map from class labels to bbox colors. If not given, a default - color map will be used. Default: ``None`` - - Example:: - >>> cfg = dict( - ... type='ObjectVisualizerNode', - ... name='object visualizer', - ... enable_key='v', - ... 
enable=True, - ... show_bbox=True, - ... must_have_keypoint=False, - ... show_keypoint=True, - ... input_buffer='frame', - ... output_buffer='vis') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - default_bbox_color = { - 'person': (148, 139, 255), - 'cat': (255, 255, 0), - 'dog': (255, 255, 0), - } - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - show_bbox: bool = True, - show_keypoint: bool = True, - must_have_keypoint: bool = False, - kpt_thr: float = 0.3, - radius: int = 4, - thickness: int = 2, - bbox_color: Optional[Union[str, Tuple, Dict]] = 'green'): - - super().__init__( - name=name, - input_buffer=input_buffer, - output_buffer=output_buffer, - enable_key=enable_key, - enable=enable) - - self.kpt_thr = kpt_thr - self.bbox_color = bbox_color - self.show_bbox = show_bbox - self.show_keypoint = show_keypoint - self.must_have_keypoint = must_have_keypoint - self.radius = radius - self.thickness = thickness - - def _draw_bbox(self, canvas: np.ndarray, input_msg: FrameMessage): - """Draw object bboxes.""" - - if self.must_have_keypoint: - objects = input_msg.get_objects( - lambda x: 'bbox' in x and 'keypoints' in x) - else: - objects = input_msg.get_objects(lambda x: 'bbox' in x) - # return if there is no detected objects - if not objects: - return canvas - - bboxes = [obj['bbox'] for obj in objects] - labels = [obj.get('label', None) for obj in objects] - default_color = (0, 255, 0) - - # Get bbox colors - if isinstance(self.bbox_color, dict): - colors = [ - self.bbox_color.get(label, default_color) for label in labels - ] - else: - colors = self.bbox_color - - imshow_bboxes( - canvas, - np.vstack(bboxes), - labels=labels, - colors=colors, - text_color='white', - font_scale=0.5) - - return canvas - - def _draw_keypoint(self, canvas: np.ndarray, input_msg: FrameMessage): - """Draw object keypoints.""" 
- objects = input_msg.get_objects(lambda x: 'pose_model_cfg' in x) - - # return if there is no object with keypoints - if not objects: - return canvas - - for model_cfg, group in groupby(objects, - lambda x: x['pose_model_cfg']): - dataset_info = objects[0]['dataset_meta'] - keypoints = [ - np.concatenate( - (obj['keypoints'], obj['keypoint_scores'][:, None]), - axis=1) for obj in group - ] - imshow_keypoints( - canvas, - keypoints, - skeleton=dataset_info['skeleton_links'], - kpt_score_thr=self.kpt_thr, - pose_kpt_color=dataset_info['keypoint_colors'], - pose_link_color=dataset_info['skeleton_link_colors'], - radius=self.radius, - thickness=self.thickness) - - return canvas - - def draw(self, input_msg: FrameMessage) -> np.ndarray: - canvas = input_msg.get_image() - - if self.show_bbox: - canvas = self._draw_bbox(canvas, input_msg) - - if self.show_keypoint: - canvas = self._draw_keypoint(canvas, input_msg) - - return canvas diff --git a/mmpose/apis/webcam/nodes/visualizer_nodes/sunglasses_effect_node.py b/mmpose/apis/webcam/nodes/visualizer_nodes/sunglasses_effect_node.py deleted file mode 100644 index 7c011177f5..0000000000 --- a/mmpose/apis/webcam/nodes/visualizer_nodes/sunglasses_effect_node.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from itertools import groupby -from typing import Dict, List, Optional, Union - -import cv2 -import numpy as np - -from ...utils import get_eye_keypoint_ids, load_image_from_disk_or_url -from ..base_visualizer_node import BaseVisualizerNode -from ..registry import NODES - - -@NODES.register_module() -class SunglassesEffectNode(BaseVisualizerNode): - """Apply sunglasses effect (draw sunglasses at the facial area)to the - objects with eye keypoints in the frame. 
- - Args: - name (str): The node name (also thread name) - input_buffer (str): The name of the input buffer - output_buffer (str|list): The name(s) of the output buffer(s) - enable_key (str|int, optional): Set a hot-key to toggle enable/disable - of the node. If an int value is given, it will be treated as an - ascii code of a key. Please note: - 1. If enable_key is set, the bypass method need to be - overridden to define the node behavior when disabled - 2. Some hot-key has been use for particular use. For example: - 'q', 'Q' and 27 are used for quit - Default: ``None`` - enable (bool): Default enable/disable status. Default: ``True``. - kpt_thr (float): The score threshold of valid keypoints. Default: 0.5 - resource_img_path (str, optional): The resource image path or url. - The image should be a pair of sunglasses with white background. - If not specified, the url of a default image will be used. See - ``SunglassesNode.default_resource_img_path``. Default: ``None`` - - Example:: - >>> cfg = dict( - ... type='SunglassesEffectNode', - ... name='sunglasses', - ... enable_key='s', - ... enable=False, - ... input_buffer='vis', - ... 
output_buffer='vis_sunglasses') - - >>> from mmpose.apis.webcam.nodes import NODES - >>> node = NODES.build(cfg) - """ - - # The image attributes to: - # "https://www.vecteezy.com/vector-art/1932353-summer-sunglasses- - # accessory-isolated-icon" by Vecteezy - default_resource_img_path = ( - 'https://user-images.githubusercontent.com/15977946/' - '170850839-acc59e26-c6b3-48c9-a9ec-87556edb99ed.jpg') - - def __init__(self, - name: str, - input_buffer: str, - output_buffer: Union[str, List[str]], - enable_key: Optional[Union[str, int]] = None, - enable: bool = True, - kpt_thr: float = 0.5, - resource_img_path: Optional[str] = None): - - super().__init__( - name=name, - input_buffer=input_buffer, - output_buffer=output_buffer, - enable_key=enable_key, - enable=enable) - - if resource_img_path is None: - resource_img_path = self.default_resource_img_path - - self.resource_img = load_image_from_disk_or_url(resource_img_path) - self.kpt_thr = kpt_thr - - def draw(self, input_msg): - canvas = input_msg.get_image() - - objects = input_msg.get_objects(lambda x: 'keypoints' in x) - - for dataset_meta, group in groupby(objects, - lambda x: x['dataset_meta']): - left_eye_index, right_eye_index = get_eye_keypoint_ids( - dataset_meta) - canvas = self.apply_sunglasses_effect(canvas, group, - left_eye_index, - right_eye_index) - return canvas - - def apply_sunglasses_effect(self, canvas: np.ndarray, objects: List[Dict], - left_eye_index: int, - right_eye_index: int) -> np.ndarray: - """Apply sunglasses effect. 
- - Args: - canvas (np.ndarray): The image to apply the effect - objects (list[dict]): The object list with keypoints - - "keypoints" ([K,3]): keypoints in [x, y, score] - left_eye_index (int): Keypoint index of the left eye - right_eye_index (int): Keypoint index of the right eye - - Returns: - np.ndarray: Processed image - """ - - hm, wm = self.resource_img.shape[:2] - # anchor points in the sunglasses image - pts_src = np.array([[0.3 * wm, 0.3 * hm], [0.3 * wm, 0.7 * hm], - [0.7 * wm, 0.3 * hm], [0.7 * wm, 0.7 * hm]], - dtype=np.float32) - - for obj in objects: - kpts = obj['keypoints'] - kpt_scores = obj['keypoint_scores'] - - if kpt_scores[left_eye_index] < self.kpt_thr or kpt_scores[ - right_eye_index] < self.kpt_thr: - continue - - kpt_leye = kpts[left_eye_index, :2] - kpt_reye = kpts[right_eye_index, :2] - # orthogonal vector to the left-to-right eyes - vo = 0.5 * (kpt_reye - kpt_leye)[::-1] * [-1, 1] - - # anchor points in the image by eye positions - pts_tar = np.vstack( - [kpt_reye + vo, kpt_reye - vo, kpt_leye + vo, kpt_leye - vo]) - - h_mat, _ = cv2.findHomography(pts_src, pts_tar) - patch = cv2.warpPerspective( - self.resource_img, - h_mat, - dsize=(canvas.shape[1], canvas.shape[0]), - borderValue=(255, 255, 255)) - # mask the white background area in the patch with a threshold 200 - mask = cv2.cvtColor(patch, cv2.COLOR_BGR2GRAY) - mask = (mask < 200).astype(np.uint8) - canvas = cv2.copyTo(patch, mask, canvas) - - return canvas diff --git a/mmpose/apis/webcam/utils/__init__.py b/mmpose/apis/webcam/utils/__init__.py deleted file mode 100644 index 2911bcd5bf..0000000000 --- a/mmpose/apis/webcam/utils/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-from .buffer import BufferManager -from .event import EventManager -from .image_capture import ImageCapture -from .message import FrameMessage, Message, VideoEndingMessage -from .misc import (copy_and_paste, expand_and_clamp, get_cached_file_path, - get_config_path, is_image_file, limit_max_fps, - load_image_from_disk_or_url, screen_matting) -from .pose import (get_eye_keypoint_ids, get_face_keypoint_ids, - get_hand_keypoint_ids, get_mouth_keypoint_ids, - get_wrist_keypoint_ids) - -__all__ = [ - 'BufferManager', 'EventManager', 'FrameMessage', 'Message', - 'limit_max_fps', 'VideoEndingMessage', 'load_image_from_disk_or_url', - 'get_cached_file_path', 'screen_matting', 'get_config_path', - 'expand_and_clamp', 'copy_and_paste', 'is_image_file', 'ImageCapture', - 'get_eye_keypoint_ids', 'get_face_keypoint_ids', 'get_wrist_keypoint_ids', - 'get_mouth_keypoint_ids', 'get_hand_keypoint_ids' -] diff --git a/mmpose/apis/webcam/utils/buffer.py b/mmpose/apis/webcam/utils/buffer.py deleted file mode 100644 index f7f8b9864e..0000000000 --- a/mmpose/apis/webcam/utils/buffer.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from functools import wraps -from queue import Queue -from typing import Any, Dict, List, Optional - -from mmengine import is_seq_of - -__all__ = ['BufferManager'] - - -def check_buffer_registered(exist=True): - """A function wrapper to check the buffer existence before it is being used - by the wrapped function. - - Args: - exist (bool): If set to ``True``, assert the buffer exists; if set to - ``False``, assert the buffer does not exist. 
Default: ``True`` - """ - - def wrapper(func): - - @wraps(func) - def wrapped(manager, name, *args, **kwargs): - if exist: - # Assert buffer exist - if name not in manager: - raise ValueError(f'Fail to call {func.__name__}: ' - f'buffer "{name}" is not registered.') - else: - # Assert buffer not exist - if name in manager: - raise ValueError(f'Fail to call {func.__name__}: ' - f'buffer "{name}" is already registered.') - return func(manager, name, *args, **kwargs) - - return wrapped - - return wrapper - - -class Buffer(Queue): - - def put_force(self, item: Any): - """Force to put an item into the buffer. - - If the buffer is already full, the earliest item in the buffer will be - remove to make room for the incoming item. - - Args: - item (any): The item to put into the buffer - """ - with self.mutex: - if self.maxsize > 0: - while self._qsize() >= self.maxsize: - _ = self._get() - self.unfinished_tasks -= 1 - - self._put(item) - self.unfinished_tasks += 1 - self.not_empty.notify() - - -class BufferManager(): - """A helper class to manage multiple buffers. - - Parameters: - buffer_type (type): The class to build buffer instances. Default: - :class:`mmpose.apis.webcam.utils.buffer.Buffer`. - buffers (dict, optional): Create :class:`BufferManager` from existing - buffers. Each item should a buffer name and the buffer. If not - given, an empty buffer manager will be create. Default: ``None`` - """ - - def __init__(self, - buffer_type: type = Buffer, - buffers: Optional[Dict] = None): - self.buffer_type = buffer_type - if buffers is None: - self._buffers = {} - else: - if is_seq_of(list(buffers.values()), buffer_type): - self._buffers = buffers.copy() - else: - raise ValueError('The values of buffers should be instance ' - f'of {buffer_type}') - - def __contains__(self, name): - return name in self._buffers - - @check_buffer_registered(False) - def register_buffer(self, name, maxsize: int = 0): - """Register a buffer. 
- - If the buffer already exists, an ValueError will be raised. - - Args: - name (any): The buffer name - maxsize (int): The capacity of the buffer. If set to 0, the - capacity is unlimited. Default: 0 - """ - self._buffers[name] = self.buffer_type(maxsize) - - @check_buffer_registered() - def put(self, name, item, block: bool = True, timeout: float = None): - """Put an item into specified buffer. - - Args: - name (any): The buffer name - item (any): The item to put into the buffer - block (bool): If set to ``True``, block if necessary util a free - slot is available in the target buffer. It blocks at most - ``timeout`` seconds and raises the ``Full`` exception. - Otherwise, put an item on the queue if a free slot is - immediately available, else raise the ``Full`` exception. - Default: ``True`` - timeout (float, optional): The most waiting time in seconds if - ``block`` is ``True``. Default: ``None`` - """ - self._buffers[name].put(item, block, timeout) - - @check_buffer_registered() - def put_force(self, name, item): - """Force to put an item into specified buffer. If the buffer was full, - the earliest item within the buffer will be popped out to make a free - slot. - - Args: - name (any): The buffer name - item (any): The item to put into the buffer - """ - self._buffers[name].put_force(item) - - @check_buffer_registered() - def get(self, name, block: bool = True, timeout: float = None) -> Any: - """Remove an return an item from the specified buffer. - - Args: - name (any): The buffer name - block (bool): If set to ``True``, block if necessary until an item - is available in the target buffer. It blocks at most - ``timeout`` seconds and raises the ``Empty`` exception. - Otherwise, return an item if one is immediately available, - else raise the ``Empty`` exception. Default: ``True`` - timeout (float, optional): The most waiting time in seconds if - ``block`` is ``True``. Default: ``None`` - - Returns: - any: The returned item. 
- """ - return self._buffers[name].get(block, timeout) - - @check_buffer_registered() - def is_empty(self, name) -> bool: - """Check if a buffer is empty. - - Args: - name (any): The buffer name - - Returns: - bool: Weather the buffer is empty. - """ - return self._buffers[name].empty() - - @check_buffer_registered() - def is_full(self, name): - """Check if a buffer is full. - - Args: - name (any): The buffer name - - Returns: - bool: Weather the buffer is full. - """ - return self._buffers[name].full() - - def get_sub_manager(self, buffer_names: List[str]) -> 'BufferManager': - """Return a :class:`BufferManager` instance that covers a subset of the - buffers in the parent. The is usually used to partially share the - buffers of the executor to the node. - - Args: - buffer_names (list): The list of buffers to create the sub manager - - Returns: - BufferManager: The created sub buffer manager. - """ - buffers = {name: self._buffers[name] for name in buffer_names} - return BufferManager(self.buffer_type, buffers) - - def get_info(self): - """Returns the information of all buffers in the manager. - - Returns: - dict[any, dict]: Each item is a buffer name and the information - dict of that buffer. - """ - buffer_info = {} - for name, buffer in self._buffers.items(): - buffer_info[name] = { - 'size': buffer.qsize(), - 'maxsize': buffer.maxsize - } - return buffer_info diff --git a/mmpose/apis/webcam/utils/event.py b/mmpose/apis/webcam/utils/event.py deleted file mode 100644 index b8e88e1d8b..0000000000 --- a/mmpose/apis/webcam/utils/event.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import logging -from collections import defaultdict -from contextlib import contextmanager -from threading import Event -from typing import Optional - -logger = logging.getLogger('Event') - - -class EventManager(): - """A helper class to manage events. - - :class:`EventManager` provides interfaces to register, set, clear and - check events by name. 
- """ - - def __init__(self): - self._events = defaultdict(Event) - - def register_event(self, event_name: str, is_keyboard: bool = False): - """Register an event. A event must be registered first before being - set, cleared or checked. - - Args: - event_name (str): The indicator of the event. The name should be - unique in one :class:`EventManager` instance - is_keyboard (bool): Specify weather it is a keyboard event. If so, - the ``event_name`` should be the key value, and the indicator - will be set as ``'_keyboard_{event_name}'``. Otherwise, the - ``event_name`` will be directly used as the indicator. - Default: ``False`` - """ - if is_keyboard: - event_name = self._get_keyboard_event_name(event_name) - self._events[event_name] = Event() - - def set(self, event_name: str, is_keyboard: bool = False): - """Set the internal flag of an event to ``True``. - - Args: - event_name (str): The indicator of the event - is_keyboard (bool): Specify weather it is a keyboard event. See - ``register_event()`` for details. Default: False - """ - if is_keyboard: - event_name = self._get_keyboard_event_name(event_name) - self._events[event_name].set() - logger.info(f'Event {event_name} is set.') - - def wait(self, - event_name: str = None, - is_keyboard: bool = False, - timeout: Optional[float] = None) -> bool: - """Block until the internal flag of an event is ``True``. - - Args: - event_name (str): The indicator of the event - is_keyboard (bool): Specify weather it is a keyboard event. See - ``register_event()`` for details. Default: False - timeout (float, optional): The optional maximum blocking time in - seconds. Default: ``None`` - - Returns: - bool: The internal event flag on exit. - """ - if is_keyboard: - event_name = self._get_keyboard_event_name(event_name) - return self._events[event_name].wait(timeout) - - def is_set(self, - event_name: str = None, - is_keyboard: Optional[bool] = False) -> bool: - """Check weather the internal flag of an event is ``True``. 
- - Args: - event_name (str): The indicator of the event - is_keyboard (bool): Specify weather it is a keyboard event. See - ``register_event()`` for details. Default: False - Returns: - bool: The internal event flag. - """ - if is_keyboard: - event_name = self._get_keyboard_event_name(event_name) - return self._events[event_name].is_set() - - def clear(self, - event_name: str = None, - is_keyboard: Optional[bool] = False): - """Reset the internal flag of en event to False. - - Args: - event_name (str): The indicator of the event - is_keyboard (bool): Specify weather it is a keyboard event. See - ``register_event()`` for details. Default: False - """ - if is_keyboard: - event_name = self._get_keyboard_event_name(event_name) - self._events[event_name].clear() - logger.info(f'Event {event_name} is cleared.') - - @staticmethod - def _get_keyboard_event_name(key): - """Get keyboard event name from the key value.""" - return f'_keyboard_{chr(key) if isinstance(key,int) else key}' - - @contextmanager - def wait_and_handle(self, - event_name: str = None, - is_keyboard: Optional[bool] = False): - """Context manager that blocks until an evenet is set ``True`` and then - goes into the context. - - The internal event flag will be reset ``False`` automatically before - entering the context. - - Args: - event_name (str): The indicator of the event - is_keyboard (bool): Specify weather it is a keyboard event. See - ``register_event()`` for details. Default: False - - Example:: - >>> from mmpose.apis.webcam.utils import EventManager - >>> manager = EventManager() - >>> manager.register_event('q', is_keybard=True) - - >>> # Once the keyboard event `q` is set, ``wait_and_handle`` - >>> # will reset the event and enter the context to invoke - >>> # ``foo()`` - >>> with manager.wait_and_handle('q', is_keybard=True): - ... 
foo() - """ - self.wait(event_name, is_keyboard) - try: - yield - finally: - self.clear(event_name, is_keyboard) diff --git a/mmpose/apis/webcam/utils/image_capture.py b/mmpose/apis/webcam/utils/image_capture.py deleted file mode 100644 index fb28acff94..0000000000 --- a/mmpose/apis/webcam/utils/image_capture.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from typing import Union - -import cv2 -import numpy as np - -from .misc import load_image_from_disk_or_url - - -class ImageCapture: - """A mock-up of cv2.VideoCapture that always return a const image. - - Args: - image (str | ndarray): The image path or image data - """ - - def __init__(self, image: Union[str, np.ndarray]): - if isinstance(image, str): - self.image = load_image_from_disk_or_url(image) - else: - self.image = image - - def isOpened(self): - return (self.image is not None) - - def read(self): - return True, self.image.copy() - - def release(self): - pass - - def get(self, propId): - if propId == cv2.CAP_PROP_FRAME_WIDTH: - return self.image.shape[1] - elif propId == cv2.CAP_PROP_FRAME_HEIGHT: - return self.image.shape[0] - elif propId == cv2.CAP_PROP_FPS: - return np.nan - else: - raise NotImplementedError() diff --git a/mmpose/apis/webcam/utils/message.py b/mmpose/apis/webcam/utils/message.py deleted file mode 100644 index 8961ea39c2..0000000000 --- a/mmpose/apis/webcam/utils/message.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import time -import uuid -import warnings -from typing import Callable, Dict, List, Optional - -import numpy as np - -Filter = Callable[[Dict], bool] - - -class Message(): - """Message base class. - - All message class should inherit this class. The basic use of a Message - instance is to carray a piece of text message (self.msg) and a dict that - stores structured data (self.data), e.g. frame image, model prediction, - et al. 
- - A message may also hold route information, which is composed of - information of all nodes the message has passed through. - - Parameters: - msg (str): The text message. - data (dict, optional): The structured data. - """ - - def __init__(self, msg: str = '', data: Optional[Dict] = None): - self.msg = msg - self.data = data if data else {} - self.route_info = [] - self.timestamp = time.time() - self.id = uuid.uuid1() - - def update_route_info(self, - node=None, - node_name: Optional[str] = None, - node_type: Optional[str] = None, - info: Optional[Dict] = None): - """Append new node information to the route information. - - Args: - node (Node, optional): An instance of Node that provides basic - information like the node name and type. Default: ``None``. - node_name (str, optional): The node name. If node is given, - node_name will be ignored. Default: ``None``. - node_type (str, optional): The class name of the node. If node - is given, node_type will be ignored. Default: ``None``. - info (dict, optional): The node information, which is usually - given by node.get_node_info(). Default: ``None``. - """ - if node is not None: - if node_name is not None or node_type is not None: - warnings.warn( - '`node_name` and `node_type` will be overridden if node ' - 'is provided.') - node_name = node.name - node_type = node.__class__.__name__ - - node_info = {'node': node_name, 'node_type': node_type, 'info': info} - self.route_info.append(node_info) - - def set_route_info(self, route_info: List[Dict]): - """Directly set the entire route information. - - Args: - route_info (list): route information to set to the message. - """ - self.route_info = route_info - - def merge_route_info(self, route_info: List[Dict]): - """Merge the given route information into the original one of the - message. This is used for combining route information from multiple - messages. The node information in the route will be reordered according - to their timestamps. 
- - Args: - route_info (list): route information to merge. - """ - self.route_info += route_info - self.route_info.sort(key=lambda x: x.get('timestamp', np.inf)) - - def get_route_info(self) -> List: - return self.route_info.copy() - - -class VideoEndingMessage(Message): - """The special message to indicate the ending of the input video.""" - - -class FrameMessage(Message): - """The message to store information of a video frame.""" - - def __init__(self, img): - super().__init__(data=dict(image=img, objects={}, model_cfgs={})) - - def get_image(self) -> np.ndarray: - """Get the frame image. - - Returns: - np.ndarray: The frame image. - """ - return self.data.get('image', None) - - def set_image(self, img): - """Set the frame image to the message. - - Args: - img (np.ndarray): The frame image. - """ - self.data['image'] = img - - def set_objects(self, objects: List[Dict]): - """Set the object information. The old object information will be - cleared. - - Args: - objects (list[dict]): A list of object information - - See also :func:`update_objects`. - """ - self.data['objects'] = {} - self.update_objects(objects) - - def update_objects(self, objects: List[Dict]): - """Update object information. - - Each object will be assigned an unique ID if it does not has one. If - an object's ID already exists in ``self.data['objects']``, the object - information will be updated; otherwise it will be added as a new - object. - - Args: - objects (list[dict]): A list of object information - """ - for obj in objects: - if '_id_' in obj: - # get the object id if it exists - obj_id = obj['_id_'] - else: - # otherwise assign a new object id - obj_id = uuid.uuid1() - obj['_id_'] = obj_id - self.data['objects'][obj_id] = obj - - def get_objects(self, obj_filter: Optional[Filter] = None) -> List[Dict]: - """Get object information from the frame data. - - Default to return all objects in the frame data. Optionally, filters - can be set to retrieve objects with specific keys and values. 
The - filters are represented as a dict. Each key in the filters specifies a - required key of the object. Each value in the filters is a tuple that - enumerate the required values of the corresponding key in the object. - - Args: - obj_filter (callable, optional): A filter function that returns a - bool value from a object (dict). If provided, only objects - that return True will be retrieved. Otherwise all objects will - be retrieved. Default: ``None``. - - Returns: - list[dict]: A list of object information. - - - Example:: - >>> objects = [ - ... {'_id_': 2, 'label': 'dog'} - ... {'_id_': 1, 'label': 'cat'}, - ... ] - >>> frame = FrameMessage(img) - >>> frame.set_objects(objects) - >>> frame.get_objects() - [ - {'_id_': 1, 'label': 'cat'}, - {'_id_': 2, 'label': 'dog'} - ] - >>> frame.get_objects(obj_filter=lambda x:x['label'] == 'cat') - [{'_id_': 1, 'label': 'cat'}] - """ - - objects = [ - obj.copy() - for obj in filter(obj_filter, self.data['objects'].values()) - ] - - return objects diff --git a/mmpose/apis/webcam/utils/misc.py b/mmpose/apis/webcam/utils/misc.py deleted file mode 100644 index 6c6f5417ae..0000000000 --- a/mmpose/apis/webcam/utils/misc.py +++ /dev/null @@ -1,367 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import importlib -import os -import os.path as osp -import sys -import time -from contextlib import contextmanager -from typing import List, Optional, Tuple -from urllib.parse import urlparse -from urllib.request import urlopen - -import cv2 -import numpy as np -from mmengine import mkdir_or_exist -from torch.hub import HASH_REGEX, download_url_to_file - - -@contextmanager -def limit_max_fps(fps: float): - """A context manager to limit maximum frequence of entering the context. - - Args: - fps (float): The maximum frequence of entering the context - - Example:: - >>> from mmpose.apis.webcam.utils import limit_max_fps - >>> import cv2 - - >>> while True: - ... with limit_max_fps(20): - ... 
cv2.imshow(img) # display image at most 20 fps - """ - t_start = time.time() - try: - yield - finally: - t_end = time.time() - if fps is not None: - t_sleep = 1.0 / fps - t_end + t_start - if t_sleep > 0: - time.sleep(t_sleep) - - -def _is_url(filename: str) -> bool: - """Check if the file is a url link. - - Args: - filename (str): the file name or url link - - Returns: - bool: is url or not. - """ - prefixes = ['http://', 'https://'] - for p in prefixes: - if filename.startswith(p): - return True - return False - - -def load_image_from_disk_or_url(filename: str, - readFlag: int = cv2.IMREAD_COLOR - ) -> np.ndarray: - """Load an image file, from disk or url. - - Args: - filename (str): file name on the disk or url link - readFlag (int): readFlag for imdecode. Default: cv2.IMREAD_COLOR - - Returns: - np.ndarray: A loaded image - """ - if _is_url(filename): - # download the image, convert it to a NumPy array, and then read - # it into OpenCV format - resp = urlopen(filename) - image = np.asarray(bytearray(resp.read()), dtype='uint8') - image = cv2.imdecode(image, readFlag) - return image - else: - image = cv2.imread(filename, readFlag) - return image - - -def get_cached_file_path(url: str, - save_dir: str, - progress: bool = True, - check_hash: bool = False, - file_name: Optional[str] = None) -> str: - r"""Loads the Torch serialized object at the given URL. - - If downloaded file is a zip file, it will be automatically decompressed - - If the object is already present in `model_dir`, it's deserialized and - returned. - The default value of ``model_dir`` is ``/checkpoints`` where - ``hub_dir`` is the directory returned by :func:`~torch.hub.get_dir`. - - Args: - url (str): URL of the object to download - save_dir (str): directory in which to save the object - progress (bool): whether or not to display a progress bar - to stderr. 
Default: ``True`` - check_hash(bool): If True, the filename part of the URL - should follow the naming convention ``filename-.ext`` - where ```` is the first eight or more digits of the - SHA256 hash of the contents of the file. The hash is used to - ensure unique names and to verify the contents of the file. - Default: ``False`` - file_name (str, optional): name for the downloaded file. Filename - from ``url`` will be used if not set. Default: ``None``. - - Returns: - str: The path to the cached file. - """ - - mkdir_or_exist(save_dir) - - parts = urlparse(url) - filename = os.path.basename(parts.path) - if file_name is not None: - filename = file_name - cached_file = os.path.join(save_dir, filename) - if not os.path.exists(cached_file): - sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file)) - hash_prefix = None - if check_hash: - r = HASH_REGEX.search(filename) # r is Optional[Match[str]] - hash_prefix = r.group(1) if r else None - download_url_to_file(url, cached_file, hash_prefix, progress=progress) - return cached_file - - -def screen_matting(img: np.ndarray, - color_low: Optional[Tuple] = None, - color_high: Optional[Tuple] = None, - color: Optional[str] = None) -> np.ndarray: - """Get screen matting mask. - - Args: - img (np.ndarray): Image data. - color_low (tuple): Lower limit (b, g, r). - color_high (tuple): Higher limit (b, g, r). - color (str): Support colors include: - - - 'green' or 'g' - - 'blue' or 'b' - - 'black' or 'k' - - 'white' or 'w' - - Returns: - np.ndarray: A mask with the same shape of the input image. The value - is 0 at the pixels in the matting color range, and 1 everywhere else. 
- """ - - if color_high is None or color_low is None: - if color is not None: - if color.lower() == 'g' or color.lower() == 'green': - color_low = (0, 200, 0) - color_high = (60, 255, 60) - elif color.lower() == 'b' or color.lower() == 'blue': - color_low = (230, 0, 0) - color_high = (255, 40, 40) - elif color.lower() == 'k' or color.lower() == 'black': - color_low = (0, 0, 0) - color_high = (40, 40, 40) - elif color.lower() == 'w' or color.lower() == 'white': - color_low = (230, 230, 230) - color_high = (255, 255, 255) - else: - raise NotImplementedError(f'Not supported color: {color}.') - else: - raise ValueError( - 'color or color_high | color_low should be given.') - - mask = cv2.inRange(img, np.array(color_low), np.array(color_high)) == 0 - - return mask.astype(np.uint8) - - -def expand_and_clamp(box: List, im_shape: Tuple, scale: float = 1.25) -> List: - """Expand the bbox and clip it to fit the image shape. - - Args: - box (list): x1, y1, x2, y2 - im_shape (tuple): image shape (h, w, c) - scale (float): expand ratio - - Returns: - list: x1, y1, x2, y2 - """ - - x1, y1, x2, y2 = box[:4] - w = x2 - x1 - h = y2 - y1 - deta_w = w * (scale - 1) / 2 - deta_h = h * (scale - 1) / 2 - - x1, y1, x2, y2 = x1 - deta_w, y1 - deta_h, x2 + deta_w, y2 + deta_h - - img_h, img_w = im_shape[:2] - - x1 = min(max(0, int(x1)), img_w - 1) - y1 = min(max(0, int(y1)), img_h - 1) - x2 = min(max(0, int(x2)), img_w - 1) - y2 = min(max(0, int(y2)), img_h - 1) - - return [x1, y1, x2, y2] - - -def _find_bbox(mask): - """Find the bounding box for the mask. - - Args: - mask (ndarray): Mask. - - Returns: - list(4, ): Returned box (x1, y1, x2, y2). - """ - mask_shape = mask.shape - if len(mask_shape) == 3: - assert mask_shape[-1] == 1, 'the channel of the mask should be 1.' 
- elif len(mask_shape) == 2: - pass - else: - NotImplementedError() - - h, w = mask_shape[:2] - mask_w = mask.sum(0) - mask_h = mask.sum(1) - - left = 0 - right = w - 1 - up = 0 - down = h - 1 - - for i in range(w): - if mask_w[i] > 0: - break - left += 1 - - for i in range(w - 1, left, -1): - if mask_w[i] > 0: - break - right -= 1 - - for i in range(h): - if mask_h[i] > 0: - break - up += 1 - - for i in range(h - 1, up, -1): - if mask_h[i] > 0: - break - down -= 1 - - return [left, up, right, down] - - -def copy_and_paste( - img: np.ndarray, - background_img: np.ndarray, - mask: np.ndarray, - bbox: Optional[List] = None, - effect_region: Tuple = (0.2, 0.2, 0.8, 0.8), - min_size: Tuple = (20, 20) -) -> np.ndarray: - """Copy the image region and paste to the background. - - Args: - img (np.ndarray): Image data. - background_img (np.ndarray): Background image data. - mask (ndarray): instance segmentation result. - bbox (list, optional): instance bbox in (x1, y1, x2, y2). If not - given, the bbox will be obtained by ``_find_bbox()``. Default: - ``None`` - effect_region (tuple): The region to apply mask, the coordinates - are normalized (x1, y1, x2, y2). Default: (0.2, 0.2, 0.8, 0.8) - min_size (tuple): The minimum region size (w, h) in pixels. - Default: (20, 20) - - Returns: - np.ndarray: The background with pasted image region. 
- """ - background_img = background_img.copy() - background_h, background_w = background_img.shape[:2] - region_h = (effect_region[3] - effect_region[1]) * background_h - region_w = (effect_region[2] - effect_region[0]) * background_w - region_aspect_ratio = region_w / region_h - - if bbox is None: - bbox = _find_bbox(mask) - instance_w = bbox[2] - bbox[0] - instance_h = bbox[3] - bbox[1] - - if instance_w > min_size[0] and instance_h > min_size[1]: - aspect_ratio = instance_w / instance_h - if region_aspect_ratio > aspect_ratio: - resize_rate = region_h / instance_h - else: - resize_rate = region_w / instance_w - - mask_inst = mask[int(bbox[1]):int(bbox[3]), int(bbox[0]):int(bbox[2])] - img_inst = img[int(bbox[1]):int(bbox[3]), int(bbox[0]):int(bbox[2])] - img_inst = cv2.resize( - img_inst.astype('float32'), - (int(resize_rate * instance_w), int(resize_rate * instance_h))) - img_inst = img_inst.astype(background_img.dtype) - mask_inst = cv2.resize( - mask_inst.astype('float32'), - (int(resize_rate * instance_w), int(resize_rate * instance_h)), - interpolation=cv2.INTER_NEAREST) - - mask_ids = list(np.where(mask_inst == 1)) - mask_ids[1] += int(effect_region[0] * background_w) - mask_ids[0] += int(effect_region[1] * background_h) - - background_img[tuple(mask_ids)] = img_inst[np.where(mask_inst == 1)] - - return background_img - - -def is_image_file(path: str) -> bool: - """Check if a path is an image file by its extension. - - Args: - path (str): The image path. - - Returns: - bool: Weather the path is an image file. - """ - if isinstance(path, str): - if path.lower().endswith(('.png', '.jpg', '.jpeg', '.tiff', '.bmp')): - return True - return False - - -def get_config_path(path: str, module_name: str): - """Get config path from an OpenMMLab codebase. - - If the path is an existing file, it will be directly returned. If the file - doesn't exist, it will be searched in the 'configs' folder of the - specified module. 
- - Args: - path (str): the path of the config file - module_name (str): The module name of an OpenMMLab codebase - - Returns: - str: The config file path. - - Example:: - >>> path = 'configs/_base_/filters/one_euro.py' - >>> get_config_path(path, 'mmpose') - '/home/mmpose/configs/_base_/filters/one_euro.py' - """ - - if osp.isfile(path): - return path - - module = importlib.import_module(module_name) - module_dir = osp.dirname(module.__file__) - path_in_module = osp.join(module_dir, '.mim', path) - - if not osp.isfile(path_in_module): - raise FileNotFoundError(f'Can not find the config file "{path}"') - - return path_in_module diff --git a/mmpose/apis/webcam/utils/pose.py b/mmpose/apis/webcam/utils/pose.py deleted file mode 100644 index 8ff32f9e16..0000000000 --- a/mmpose/apis/webcam/utils/pose.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -from typing import Dict, List, Tuple - - -def get_eye_keypoint_ids(dataset_meta: Dict) -> Tuple[int, int]: - """A helper function to get the keypoint indices of left and right eyes - from the dataset meta information. - - Args: - dataset_meta (dict): dataset meta information. - - Returns: - tuple[int, int]: The keypoint indices of left eye and right eye. 
- """ - left_eye_idx = None - right_eye_idx = None - - # try obtaining eye point ids from dataset_meta - keypoint_name2id = dataset_meta.get('keypoint_name2id', {}) - left_eye_idx = keypoint_name2id.get('left_eye', None) - right_eye_idx = keypoint_name2id.get('right_eye', None) - - if left_eye_idx is None or right_eye_idx is None: - # Fall back to hard coded keypoint id - dataset_name = dataset_meta.get('dataset_name', 'unknown dataset') - if dataset_name in {'coco', 'coco_wholebody'}: - left_eye_idx = 1 - right_eye_idx = 2 - elif dataset_name in {'animalpose', 'ap10k'}: - left_eye_idx = 0 - right_eye_idx = 1 - else: - raise ValueError('Can not determine the eye keypoint id of ' - f'{dataset_name}') - - return left_eye_idx, right_eye_idx - - -def get_face_keypoint_ids(dataset_meta: Dict) -> List: - """A helper function to get the keypoint indices of the face from the - dataset meta information. - - Args: - dataset_meta (dict): dataset meta information. - - Returns: - list[int]: face keypoint indices. The length depends on the dataset. - """ - face_indices = [] - - # try obtaining nose point ids from dataset_meta - keypoint_name2id = dataset_meta.get('keypoint_name2id', {}) - for id in range(68): - face_indices.append(keypoint_name2id.get(f'face-{id}', None)) - - if None in face_indices: - # Fall back to hard coded keypoint id - dataset_name = dataset_meta.get('dataset_name', 'unknown dataset') - if dataset_name in {'coco_wholebody'}: - face_indices = list(range(23, 91)) - else: - raise ValueError('Can not determine the face id of ' - f'{dataset_name}') - - return face_indices - - -def get_wrist_keypoint_ids(dataset_meta: Dict) -> Tuple[int, int]: - """A helper function to get the keypoint indices of left and right wrists - from the dataset meta information. - - Args: - dataset_meta (dict): dataset meta information. - Returns: - tuple[int, int]: The keypoint indices of left and right wrists. 
- """ - - # try obtaining wrist point ids from dataset_meta - keypoint_name2id = dataset_meta.get('keypoint_name2id', {}) - left_wrist_idx = keypoint_name2id.get('left_wrist', None) - right_wrist_idx = keypoint_name2id.get('right_wrist', None) - - if left_wrist_idx is None or right_wrist_idx is None: - # Fall back to hard coded keypoint id - dataset_name = dataset_meta.get('dataset_name', 'unknown dataset') - if dataset_name in {'coco', 'coco_wholebody'}: - left_wrist_idx = 9 - right_wrist_idx = 10 - elif dataset_name == 'animalpose': - left_wrist_idx = 16 - right_wrist_idx = 17 - elif dataset_name == 'ap10k': - left_wrist_idx = 7 - right_wrist_idx = 10 - else: - raise ValueError('Can not determine the eye keypoint id of ' - f'{dataset_name}') - - return left_wrist_idx, right_wrist_idx - - -def get_mouth_keypoint_ids(dataset_meta: Dict) -> int: - """A helper function to get the mouth keypoint index from the dataset meta - information. - - Args: - dataset_meta (dict): dataset meta information. - Returns: - int: The mouth keypoint index - """ - # try obtaining mouth point ids from dataset_info - keypoint_name2id = dataset_meta.get('keypoint_name2id', {}) - mouth_index = keypoint_name2id.get('face-62', None) - - if mouth_index is None: - # Fall back to hard coded keypoint id - dataset_name = dataset_meta.get('dataset_name', 'unknown dataset') - if dataset_name == 'coco_wholebody': - mouth_index = 85 - else: - raise ValueError('Can not determine the eye keypoint id of ' - f'{dataset_name}') - - return mouth_index - - -def get_hand_keypoint_ids(dataset_meta: Dict) -> List[int]: - """A helper function to get the keypoint indices of left and right hand - from the dataset meta information. - - Args: - dataset_meta (dict): dataset meta information. - Returns: - list[int]: hand keypoint indices. The length depends on the dataset. 
- """ - # try obtaining hand keypoint ids from dataset_meta - keypoint_name2id = dataset_meta.get('keypoint_name2id', {}) - hand_indices = [] - hand_indices.append(keypoint_name2id.get('left_hand_root', None)) - - for id in range(1, 5): - hand_indices.append(keypoint_name2id.get(f'left_thumb{id}', None)) - for id in range(1, 5): - hand_indices.append(keypoint_name2id.get(f'left_forefinger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'left_middle_finger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'left_ring_finger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'left_pinky_finger{id}', None)) - - hand_indices.append(keypoint_name2id.get('right_hand_root', None)) - - for id in range(1, 5): - hand_indices.append(keypoint_name2id.get(f'right_thumb{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'right_forefinger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'right_middle_finger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'right_ring_finger{id}', None)) - for id in range(1, 5): - hand_indices.append( - keypoint_name2id.get(f'right_pinky_finger{id}', None)) - - if None in hand_indices: - # Fall back to hard coded keypoint id - dataset_name = dataset_meta.get('dataset_name', 'unknown dataset') - if dataset_name in {'coco_wholebody'}: - hand_indices = list(range(91, 133)) - else: - raise ValueError('Can not determine the hand id of ' - f'{dataset_name}') - - return hand_indices diff --git a/mmpose/apis/webcam/webcam_executor.py b/mmpose/apis/webcam/webcam_executor.py deleted file mode 100644 index f39aa4b847..0000000000 --- a/mmpose/apis/webcam/webcam_executor.py +++ /dev/null @@ -1,329 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import logging -import sys -import time -import warnings -from threading import Thread -from typing import Dict, List, Optional, Tuple, Union - -import cv2 - -from .nodes import NODES -from .utils import (BufferManager, EventManager, FrameMessage, ImageCapture, - VideoEndingMessage, is_image_file, limit_max_fps) - -try: - from contextlib import nullcontext -except ImportError: - # compatible with python3.6 - from contextlib import contextmanager - - @contextmanager - def nullcontext(enter_result=None): - yield enter_result - - -DEFAULT_FRAME_BUFFER_SIZE = 1 -DEFAULT_INPUT_BUFFER_SIZE = 1 -DEFAULT_DISPLAY_BUFFER_SIZE = 0 -DEFAULT_USER_BUFFER_SIZE = 1 - -logger = logging.getLogger('Executor') - - -class WebcamExecutor(): - """The interface to build and execute webcam applications from configs. - - Parameters: - nodes (list[dict]): Node configs. See :class:`webcam.nodes.Node` for - details - name (str): Executor name. Default: 'MMPose Webcam App'. - camera_id (int | str): The camera ID (usually the ID of the default - camera is 0). Alternatively a file path or a URL can be given - to load from a video or image file. - camera_frame_shape (tuple, optional): Set the frame shape of the - camera in (width, height). If not given, the default frame shape - will be used. This argument is only valid when using a camera - as the input source. Default: ``None`` - camera_max_fps (int): Video reading maximum FPS. Default: 30 - buffer_sizes (dict, optional): A dict to specify buffer sizes. The - key is the buffer name and the value is the buffer size. 
- Default: ``None`` - - Example:: - >>> cfg = dict( - >>> name='Test Webcam', - >>> camera_id=0, - >>> camera_max_fps=30, - >>> nodes=[ - >>> dict( - >>> type='MonitorNode', - >>> name='monitor', - >>> enable_key='m', - >>> enable=False, - >>> input_buffer='_frame_', - >>> output_buffer='display'), - >>> dict( - >>> type='RecorderNode', - >>> name='recorder', - >>> out_video_file='webcam_output.mp4', - >>> input_buffer='display', - >>> output_buffer='_display_') - >>> ]) - - >>> executor = WebcamExecutor(**cfg) - """ - - def __init__(self, - nodes: List[Dict], - name: str = 'MMPose Webcam App', - camera_id: Union[int, str] = 0, - camera_max_fps: int = 30, - camera_frame_shape: Optional[Tuple[int, int]] = None, - synchronous: bool = False, - buffer_sizes: Optional[Dict[str, int]] = None): - - # Basic parameters - self.name = name - self.camera_id = camera_id - self.camera_max_fps = camera_max_fps - self.camera_frame_shape = camera_frame_shape - self.synchronous = synchronous - - # self.buffer_manager manages data flow between executor and nodes - self.buffer_manager = BufferManager() - # self.event_manager manages event-based asynchronous communication - self.event_manager = EventManager() - # self.node_list holds all node instance - self.node_list = [] - # self.vcap is used to read camera frames. 
It will be built when the - # executor starts running - self.vcap = None - - # Register executor events - self.event_manager.register_event('_exit_', is_keyboard=False) - if self.synchronous: - self.event_manager.register_event('_idle_', is_keyboard=False) - - # Register nodes - if not nodes: - raise ValueError('No node is registered to the executor.') - - # Register default buffers - if buffer_sizes is None: - buffer_sizes = {} - # _frame_ buffer - frame_buffer_size = buffer_sizes.get('_frame_', - DEFAULT_FRAME_BUFFER_SIZE) - self.buffer_manager.register_buffer('_frame_', frame_buffer_size) - # _input_ buffer - input_buffer_size = buffer_sizes.get('_input_', - DEFAULT_INPUT_BUFFER_SIZE) - self.buffer_manager.register_buffer('_input_', input_buffer_size) - # _display_ buffer - display_buffer_size = buffer_sizes.get('_display_', - DEFAULT_DISPLAY_BUFFER_SIZE) - self.buffer_manager.register_buffer('_display_', display_buffer_size) - - # Build all nodes: - for node_cfg in nodes: - logger.info(f'Create node: {node_cfg.name}({node_cfg.type})') - node = NODES.build(node_cfg) - - # Register node - self.node_list.append(node) - - # Register buffers - for buffer_info in node.registered_buffers: - buffer_name = buffer_info.buffer_name - if buffer_name in self.buffer_manager: - continue - buffer_size = buffer_sizes.get(buffer_name, - DEFAULT_USER_BUFFER_SIZE) - self.buffer_manager.register_buffer(buffer_name, buffer_size) - logger.info( - f'Register user buffer: {buffer_name}({buffer_size})') - - # Register events - for event_info in node.registered_events: - self.event_manager.register_event( - event_name=event_info.event_name, - is_keyboard=event_info.is_keyboard) - logger.info(f'Register event: {event_info.event_name}') - - # Set executor for nodes - # This step is performed after node building when the executor has - # create full buffer/event managers and can - for node in self.node_list: - logger.info(f'Set executor for node: {node.name})') - node.set_executor(self) - - 
def _read_camera(self): - """Read video frames from the caemra (or the source video/image) and - put them into input buffers.""" - - camera_id = self.camera_id - fps = self.camera_max_fps - - # Build video capture - if is_image_file(camera_id): - self.vcap = ImageCapture(camera_id) - else: - self.vcap = cv2.VideoCapture(camera_id) - if self.camera_frame_shape is not None: - width, height = self.camera_frame_shape - self.vcap.set(cv2.CAP_PROP_FRAME_WIDTH, width) - self.vcap.set(cv2.CAP_PROP_FRAME_HEIGHT, height) - - if not self.vcap.isOpened(): - warnings.warn(f'Cannot open camera (ID={camera_id})') - sys.exit() - - # Read video frames in a loop - first_frame = True - while not self.event_manager.is_set('_exit_'): - if self.synchronous: - if first_frame: - cm = nullcontext() - else: - # Read a new frame until the last frame has been processed - cm = self.event_manager.wait_and_handle('_idle_') - else: - # Read frames with a maximum FPS - cm = limit_max_fps(fps) - - first_frame = False - - with cm: - # Read a frame - ret_val, frame = self.vcap.read() - if ret_val: - # Put frame message (for display) into buffer `_frame_` - frame_msg = FrameMessage(frame) - self.buffer_manager.put('_frame_', frame_msg) - - # Put input message (for model inference or other use) - # into buffer `_input_` - input_msg = FrameMessage(frame.copy()) - input_msg.update_route_info( - node_name='Camera Info', - node_type='none', - info=self._get_camera_info()) - self.buffer_manager.put_force('_input_', input_msg) - logger.info('Read one frame.') - else: - logger.info('Reached the end of the video.') - # Put a video ending signal - self.buffer_manager.put_force('_frame_', - VideoEndingMessage()) - self.buffer_manager.put_force('_input_', - VideoEndingMessage()) - # Wait for `_exit_` event util a timeout occurs - if not self.event_manager.wait('_exit_', timeout=5.0): - break - - self.vcap.release() - - def _display(self): - """Receive processed frames from the output buffer and display on - 
screen.""" - - output_msg = None - - while not self.event_manager.is_set('_exit_'): - while self.buffer_manager.is_empty('_display_'): - time.sleep(0.001) - - # Set _idle_ to allow reading next frame - if self.synchronous: - self.event_manager.set('_idle_') - - # acquire output from buffer - output_msg = self.buffer_manager.get('_display_') - - # None indicates input stream ends - if isinstance(output_msg, VideoEndingMessage): - self.event_manager.set('_exit_') - break - - img = output_msg.get_image() - - # show in a window - cv2.imshow(self.name, img) - - # handle keyboard input - key = cv2.waitKey(1) - if key != -1: - self._on_keyboard_input(key) - - cv2.destroyAllWindows() - - # Avoid dead lock - if self.synchronous: - self.event_manager.set('_idle_') - - def _on_keyboard_input(self, key): - """Handle the keyboard input. - - The key 'Q' and `ESC` will trigger an '_exit_' event, which will be - responded by all nodes and the executor itself to exit. Other keys will - trigger keyboard event to be responded by the nodes which has - registered corresponding event. See :class:`webcam.utils.EventManager` - for details. - """ - - if key in (27, ord('q'), ord('Q')): - logger.info(f'Exit event captured: {key}') - self.event_manager.set('_exit_') - else: - logger.info(f'Keyboard event captured: {key}') - self.event_manager.set(key, is_keyboard=True) - - def _get_camera_info(self): - """Return the camera information in a dict.""" - - frame_width = self.vcap.get(cv2.CAP_PROP_FRAME_WIDTH) - frame_height = self.vcap.get(cv2.CAP_PROP_FRAME_HEIGHT) - frame_rate = self.vcap.get(cv2.CAP_PROP_FPS) - - cam_info = { - 'Camera ID': self.camera_id, - 'Camera resolution': f'{frame_width}x{frame_height}', - 'Camera FPS': frame_rate, - } - - return cam_info - - def run(self): - """Start the executor. - - This method starts all nodes as well as video I/O in separate threads. 
- """ - - try: - # Start node threads - non_daemon_nodes = [] - for node in self.node_list: - node.start() - if not node.daemon: - non_daemon_nodes.append(node) - - # Create a thread to read video frames - t_read = Thread(target=self._read_camera, args=()) - t_read.start() - - # Run display in the main thread - self._display() - logger.info('Display has stopped.') - - # joint non-daemon nodes and executor threads - logger.info('Camera reading is about to join.') - t_read.join() - - for node in non_daemon_nodes: - logger.info(f'Node {node.name} is about to join.') - node.join() - logger.info('All nodes jointed successfully.') - - except KeyboardInterrupt: - pass diff --git a/requirements/mminstall.txt b/requirements/mminstall.txt index 24be7462fc..30d8402a42 100644 --- a/requirements/mminstall.txt +++ b/requirements/mminstall.txt @@ -1,3 +1,3 @@ mmcv>=2.0.0,<2.1.0 -mmdet>=3.0.0,<3.1.0 +mmdet>=3.0.0,<3.2.0 mmengine>=0.4.0,<1.0.0 diff --git a/tests/test_apis/test_inferencers/test_mmpose_inferencer.py b/tests/test_apis/test_inferencers/test_mmpose_inferencer.py index f679df27b6..8b8a4744b8 100644 --- a/tests/test_apis/test_inferencers/test_mmpose_inferencer.py +++ b/tests/test_apis/test_inferencers/test_mmpose_inferencer.py @@ -11,10 +11,15 @@ from mmpose.apis.inferencers import MMPoseInferencer from mmpose.structures import PoseDataSample +from mmpose.utils import register_all_modules class TestMMPoseInferencer(TestCase): + def tearDown(self) -> None: + register_all_modules(init_default_scope=True) + return super().tearDown() + def test_pose2d_call(self): try: from mmdet.apis.det_inferencer import DetInferencer # noqa: F401 diff --git a/tests/test_apis/test_inferencers/test_pose2d_inferencer.py b/tests/test_apis/test_inferencers/test_pose2d_inferencer.py index 63206631ba..b59232efac 100644 --- a/tests/test_apis/test_inferencers/test_pose2d_inferencer.py +++ b/tests/test_apis/test_inferencers/test_pose2d_inferencer.py @@ -13,10 +13,15 @@ from mmpose.apis.inferencers 
import Pose2DInferencer from mmpose.structures import PoseDataSample +from mmpose.utils import register_all_modules class TestPose2DInferencer(TestCase): + def tearDown(self) -> None: + register_all_modules(init_default_scope=True) + return super().tearDown() + def _get_det_model_weights(self): if platform.system().lower() == 'windows': # the default human/animal pose estimator utilizes rtmdet-m diff --git a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py index 4a3f5a613e..da4a34b160 100644 --- a/tests/test_apis/test_inferencers/test_pose3d_inferencer.py +++ b/tests/test_apis/test_inferencers/test_pose3d_inferencer.py @@ -12,10 +12,15 @@ from mmpose.apis.inferencers import Pose2DInferencer, Pose3DInferencer from mmpose.structures import PoseDataSample +from mmpose.utils import register_all_modules class TestPose3DInferencer(TestCase): + def tearDown(self) -> None: + register_all_modules(init_default_scope=True) + return super().tearDown() + def _get_det_model_weights(self): if platform.system().lower() == 'windows': # the default human/animal pose estimator utilizes rtmdet-m diff --git a/tests/test_apis/test_webcam/test_nodes/test_big_eye_effect_node.py b/tests/test_apis/test_webcam/test_nodes/test_big_eye_effect_node.py deleted file mode 100644 index b5a8ee8f72..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_big_eye_effect_node.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest - -import mmcv -import numpy as np -from mmengine import Config - -from mmpose.apis.webcam.nodes import BigeyeEffectNode -from mmpose.apis.webcam.utils.message import FrameMessage -from mmpose.datasets.datasets.utils import parse_pose_metainfo - - -class TestBigeyeEffectNode(unittest.TestCase): - - def setUp(self) -> None: - self.node = BigeyeEffectNode( - name='big-eye', input_buffer='vis', output_buffer='vis_bigeye') - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - h, w = image.shape[:2] - msg.set_image(image) - - objects = [ - dict( - bbox=np.array([285.1, 44.4, 510.2, 387.7]), - keypoints=np.stack((np.random.rand(17) * - (w - 1), np.random.rand(17) * (h - 1)), - axis=1), - keypoint_scores=np.ones(17), - dataset_meta=parse_pose_metainfo( - Config.fromfile('configs/_base_/datasets/coco.py') - ['dataset_info'])) - ] - msg.update_objects(objects) - - return msg - - def test_process(self): - input_msg = self._get_input_msg() - img_h, img_w = input_msg.get_image().shape[:2] - self.assertEqual(len(input_msg.get_objects()), 1) - - output_msg = self.node.process(dict(input=input_msg)) - canvas = output_msg.get_image() - self.assertIsInstance(canvas, np.ndarray) - self.assertEqual(canvas.shape[0], img_h) - self.assertEqual(canvas.shape[1], img_w) - - def test_bypass(self): - input_msg = self._get_input_msg() - img = input_msg.get_image().copy() - output_msg = self.node.bypass(dict(input=input_msg)) - self.assertTrue((img == output_msg.get_image()).all()) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_detector_node.py b/tests/test_apis/test_webcam/test_nodes/test_detector_node.py deleted file mode 100644 index b519744fee..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_detector_node.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest - -import mmcv - -from mmpose.apis.webcam.nodes import DetectorNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestDetectorNode(unittest.TestCase): - model_config = dict( - name='detector', - model_config='demo/mmdetection_cfg/' - 'ssdlite_mobilenetv2-scratch_8xb24-600e_coco.py', - model_checkpoint='https://download.openmmlab.com' - '/mmdetection/v2.0/ssd/' - 'ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_' - 'scratch_600e_coco_20210629_110627-974d9307.pth', - device='cpu', - input_buffer='_input_', - output_buffer='det_result') - - def setUp(self) -> None: - self._has_mmdet = True - try: - from mmdet.apis import init_detector # noqa: F401 - except (ImportError, ModuleNotFoundError): - self._has_mmdet = False - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - msg.set_image(image) - - return msg - - def test_init(self): - - if not self._has_mmdet: - return unittest.skip('mmdet is not installed') - - node = DetectorNode(**self.model_config) - - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, '_input_') - self.assertEqual(node._output_buffers[0].buffer_name, 'det_result') - self.assertEqual(node.device, 'cpu') - - def test_process(self): - - if not self._has_mmdet: - return unittest.skip('mmdet is not installed') - - node = DetectorNode(**self.model_config) - - input_msg = self._get_input_msg() - self.assertEqual(len(input_msg.get_objects()), 0) - - output_msg = node.process(dict(input=input_msg)) - objects = output_msg.get_objects() - # there is a person in the image - self.assertGreaterEqual(len(objects), 1) - self.assertIn('person', [obj['label'] for obj in objects]) - self.assertEqual(objects[0]['bbox'].shape, (4, )) - - def test_bypass(self): - - if not self._has_mmdet: - return unittest.skip('mmdet is not 
installed') - - node = DetectorNode(**self.model_config) - - input_msg = self._get_input_msg() - self.assertEqual(len(input_msg.get_objects()), 0) - - output_msg = node.bypass(dict(input=input_msg)) - self.assertEqual(len(output_msg.get_objects()), 0) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_monitor_node.py b/tests/test_apis/test_webcam/test_nodes/test_monitor_node.py deleted file mode 100644 index d71654cc39..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_monitor_node.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest - -import mmcv - -from mmpose.apis.webcam.nodes import MonitorNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestMonitorNode(unittest.TestCase): - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - msg.set_image(image) - - objects = [dict(label='human')] - msg.update_objects(objects) - - return msg - - def test_init(self): - node = MonitorNode( - name='monitor', input_buffer='_frame_', output_buffer='display') - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, '_frame_') - self.assertEqual(node._output_buffers[0].buffer_name, 'display') - - # test initialization with given ignore_items - node = MonitorNode( - name='monitor', - input_buffer='_frame_', - output_buffer='display', - ignore_items=['ignore_item']) - self.assertEqual(len(node.ignore_items), 1) - self.assertEqual(node.ignore_items[0], 'ignore_item') - - def test_process(self): - node = MonitorNode( - name='monitor', input_buffer='_frame_', output_buffer='display') - - input_msg = self._get_input_msg() - self.assertEqual(len(input_msg.get_route_info()), 0) - img_shape = input_msg.get_image().shape - - output_msg = 
node.process(dict(input=input_msg)) - # 'System Info' will be added into route_info - self.assertEqual(len(output_msg.get_route_info()), 1) - self.assertEqual(output_msg.get_image().shape, img_shape) - - def test_bypass(self): - node = MonitorNode( - name='monitor', input_buffer='_frame_', output_buffer='display') - input_msg = self._get_input_msg() - self.assertEqual(len(input_msg.get_route_info()), 0) - - output_msg = node.bypass(dict(input=input_msg)) - # output_msg should be identity with input_msg - self.assertEqual(len(output_msg.get_route_info()), 0) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_notice_board_node.py b/tests/test_apis/test_webcam/test_nodes/test_notice_board_node.py deleted file mode 100644 index 31583bf815..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_notice_board_node.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest - -import mmcv -import numpy as np - -from mmpose.apis.webcam.nodes import NoticeBoardNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestNoticeBoardNode(unittest.TestCase): - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - h, w = image.shape[:2] - msg.set_image(image) - - return msg - - def test_init(self): - node = NoticeBoardNode( - name='instruction', input_buffer='vis', output_buffer='vis_notice') - - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, 'vis') - self.assertEqual(node._output_buffers[0].buffer_name, 'vis_notice') - self.assertEqual(len(node.content_lines), 1) - - node = NoticeBoardNode( - name='instruction', - input_buffer='vis', - output_buffer='vis_notice', - content_lines=[ - 'This is a demo for pose visualization and simple image ' - 'effects. 
Have fun!', '', 'Hot-keys:', - '"v": Pose estimation result visualization', - '"s": Sunglasses effect B-)', '"b": Big-eye effect 0_0', - '"h": Show help information', - '"m": Show diagnostic information', '"q": Exit' - ]) - self.assertEqual(len(node.content_lines), 9) - - def test_draw(self): - node = NoticeBoardNode( - name='instruction', input_buffer='vis', output_buffer='vis_notice') - input_msg = self._get_input_msg() - img_h, img_w = input_msg.get_image().shape[:2] - - canvas = node.draw(input_msg) - self.assertIsInstance(canvas, np.ndarray) - self.assertEqual(canvas.shape[0], img_h) - self.assertEqual(canvas.shape[1], img_w) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_object_assigner_node.py b/tests/test_apis/test_webcam/test_nodes/test_object_assigner_node.py deleted file mode 100644 index 0405c885d7..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_object_assigner_node.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import time -import unittest - -import mmcv -import numpy as np - -from mmpose.apis.webcam.nodes import ObjectAssignerNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestObjectAssignerNode(unittest.TestCase): - - def _get_input_msg(self, with_object: bool = False): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - msg.set_image(image) - - if with_object: - objects = [ - dict( - label='person', - class_id=0, - bbox=np.array([285.1, 44.4, 510.2, 387.7])) - ] - msg.update_objects(objects) - - return msg - - def test_init(self): - node = ObjectAssignerNode( - name='object assigner', - frame_buffer='_frame_', - object_buffer='pred_result', - output_buffer='frame') - - self.assertEqual(len(node._input_buffers), 2) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, 'pred_result') - self.assertEqual(node._input_buffers[1].buffer_name, '_frame_') - self.assertEqual(node._output_buffers[0].buffer_name, 'frame') - - def test_process(self): - node = ObjectAssignerNode( - name='object assigner', - frame_buffer='_frame_', - object_buffer='pred_result', - output_buffer='frame') - - frame_msg = self._get_input_msg() - object_msg = self._get_input_msg(with_object=True) - self.assertEqual(len(frame_msg.get_objects()), 0) - self.assertEqual(len(object_msg.get_objects()), 1) - - # node.synchronous is False - output_msg = node.process(dict(frame=frame_msg, object=object_msg)) - objects = output_msg.get_objects() - self.assertEqual(id(frame_msg), id(output_msg)) - self.assertEqual(objects[0]['_id_'], - object_msg.get_objects()[0]['_id_']) - - # object_message is None - # take a pause to increase the interval of messages' timestamp - # to avoid ZeroDivisionError when computing fps in `process` - time.sleep(1 / 30.0) - frame_msg = self._get_input_msg() - output_msg = node.process(dict(frame=frame_msg, object=None)) - objects = 
output_msg.get_objects() - self.assertEqual(objects[0]['_id_'], - object_msg.get_objects()[0]['_id_']) - - # node.synchronous is True - node.synchronous = True - time.sleep(1 / 30.0) - frame_msg = self._get_input_msg() - object_msg = self._get_input_msg(with_object=True) - output_msg = node.process(dict(frame=frame_msg, object=object_msg)) - self.assertEqual(len(frame_msg.get_objects()), 0) - self.assertEqual(id(object_msg), id(output_msg)) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_object_visualizer_node.py b/tests/test_apis/test_webcam/test_nodes/test_object_visualizer_node.py deleted file mode 100644 index c55bc1eb8d..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_object_visualizer_node.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest - -import mmcv -import numpy as np -from mmengine import Config - -from mmpose.apis.webcam.nodes import ObjectVisualizerNode -from mmpose.apis.webcam.utils.message import FrameMessage -from mmpose.datasets.datasets.utils import parse_pose_metainfo - - -class TestObjectVisualizerNode(unittest.TestCase): - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - h, w = image.shape[:2] - msg.set_image(image) - - objects = [ - dict( - label='person', - class_id=0, - bbox=np.array([285.1, 44.4, 510.2, 387.7]), - keypoints=np.stack((np.random.rand(17) * - (w - 1), np.random.rand(17) * (h - 1)), - axis=1), - keypoint_scores=np.ones(17), - dataset_meta=parse_pose_metainfo( - Config.fromfile('configs/_base_/datasets/coco.py') - ['dataset_info'])) - ] - msg.update_objects(objects) - - return msg - - def test_init(self): - node = ObjectVisualizerNode( - name='object visualizer', - input_buffer='frame', - output_buffer='vis') - - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - 
self.assertEqual(node._input_buffers[0].buffer_name, 'frame') - self.assertEqual(node._output_buffers[0].buffer_name, 'vis') - - def test_draw(self): - # draw all objects with bounding box - node = ObjectVisualizerNode( - name='object visualizer', - input_buffer='frame', - output_buffer='vis') - input_msg = self._get_input_msg() - img_h, img_w = input_msg.get_image().shape[:2] - self.assertEqual(len(input_msg.get_objects()), 1) - - canvas = node.draw(input_msg) - self.assertIsInstance(canvas, np.ndarray) - self.assertEqual(canvas.shape[0], img_h) - self.assertEqual(canvas.shape[1], img_w) - - # draw all objects with keypoints - node = ObjectVisualizerNode( - name='object visualizer', - input_buffer='frame', - output_buffer='vis', - must_have_keypoint=True) - canvas = node.draw(input_msg) - self.assertIsInstance(canvas, np.ndarray) - self.assertEqual(canvas.shape[0], img_h) - self.assertEqual(canvas.shape[1], img_w) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_pose_estimator_node.py b/tests/test_apis/test_webcam/test_nodes/test_pose_estimator_node.py deleted file mode 100644 index 43345d116a..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_pose_estimator_node.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest -from copy import deepcopy - -import mmcv -import numpy as np - -from mmpose.apis.webcam.nodes import TopdownPoseEstimatorNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestTopdownPoseEstimatorNode(unittest.TestCase): - model_config = dict( - name='human pose estimator', - model_config='configs/wholebody_2d_keypoint/' - 'topdown_heatmap/coco-wholebody/' - 'td-hm_vipnas-mbv3_dark-8xb64-210e_coco-wholebody-256x192.py', - model_checkpoint='https://download.openmmlab.com/mmpose/' - 'top_down/vipnas/vipnas_mbv3_coco_wholebody_256x192_dark' - '-e2158108_20211205.pth', - device='cpu', - input_buffer='det_result', - output_buffer='human_pose') - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - msg.set_image(image) - - objects = [ - dict( - label='person', - class_id=0, - bbox=np.array([285.1, 44.4, 510.2, 387.7])) - ] - msg.update_objects(objects) - - return msg - - def test_init(self): - node = TopdownPoseEstimatorNode(**self.model_config) - - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, 'det_result') - self.assertEqual(node._output_buffers[0].buffer_name, 'human_pose') - self.assertEqual(node.device, 'cpu') - - def test_process(self): - node = TopdownPoseEstimatorNode(**self.model_config) - - input_msg = self._get_input_msg() - self.assertEqual(len(input_msg.get_objects()), 1) - - # run inference on all objects - output_msg = node.process(dict(input=input_msg)) - objects = output_msg.get_objects() - - # there is a person in the image - self.assertGreaterEqual(len(objects), 1) - self.assertIn('person', [obj['label'] for obj in objects]) - self.assertEqual(objects[0]['keypoints'].shape, (133, 2)) - self.assertEqual(objects[0]['keypoint_scores'].shape, (133, )) - - # select objects by class_id - model_config = 
self.model_config.copy() - model_config['class_ids'] = [0] - node = TopdownPoseEstimatorNode(**model_config) - output_msg = node.process(dict(input=input_msg)) - self.assertGreaterEqual(len(objects), 1) - - # select objects by label - model_config = self.model_config.copy() - model_config['labels'] = ['cat'] - node = TopdownPoseEstimatorNode(**model_config) - output_msg = node.process(dict(input=input_msg)) - self.assertGreaterEqual(len(objects), 0) - - def test_bypass(self): - node = TopdownPoseEstimatorNode(**self.model_config) - - input_msg = self._get_input_msg() - input_objects = input_msg.get_objects() - - output_msg = node.bypass(dict(input=deepcopy(input_msg))) - output_objects = output_msg.get_objects() - self.assertEqual(len(input_objects), len(output_objects)) - self.assertListEqual( - list(input_objects[0].keys()), list(output_objects[0].keys())) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_recorder_node.py b/tests/test_apis/test_webcam/test_nodes/test_recorder_node.py deleted file mode 100644 index a646abb430..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_recorder_node.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import os -import unittest - -import mmcv - -from mmpose.apis.webcam.nodes import RecorderNode -from mmpose.apis.webcam.utils.message import FrameMessage - - -class TestMonitorNode(unittest.TestCase): - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - msg.set_image(image) - - objects = [dict(label='human')] - msg.update_objects(objects) - - return msg - - def test_init(self): - node = RecorderNode( - name='recorder', - out_video_file='webcam_output.mp4', - input_buffer='display', - output_buffer='_display_') - self.assertEqual(len(node._input_buffers), 1) - self.assertEqual(len(node._output_buffers), 1) - self.assertEqual(node._input_buffers[0].buffer_name, 'display') - self.assertEqual(node._output_buffers[0].buffer_name, '_display_') - self.assertTrue(node.t_record.is_alive()) - - def test_process(self): - node = RecorderNode( - name='recorder', - out_video_file='webcam_output.mp4', - input_buffer='display', - output_buffer='_display_', - buffer_size=1) - - if os.path.exists('webcam_output.mp4'): - os.remove('webcam_output.mp4') - - input_msg = self._get_input_msg() - node.process(dict(input=input_msg)) - self.assertEqual(node.queue.qsize(), 1) - - # process 5 frames in total. 
- # the first frame has been processed above - for _ in range(4): - node.process(dict(input=input_msg)) - node.on_exit() - - # check the properties of output video - self.assertTrue(os.path.exists('webcam_output.mp4')) - video = mmcv.VideoReader('webcam_output.mp4') - self.assertEqual(video.frame_cnt, 5) - self.assertEqual(video.fps, 30) - video.vcap.release() - os.remove('webcam_output.mp4') - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_nodes/test_sunglasses_effect_node.py b/tests/test_apis/test_webcam/test_nodes/test_sunglasses_effect_node.py deleted file mode 100644 index 1bf1c8199d..0000000000 --- a/tests/test_apis/test_webcam/test_nodes/test_sunglasses_effect_node.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest - -import mmcv -import numpy as np -from mmengine import Config - -from mmpose.apis.webcam.nodes import SunglassesEffectNode -from mmpose.apis.webcam.utils.message import FrameMessage -from mmpose.datasets.datasets.utils import parse_pose_metainfo - - -class TestSunglassesEffectNode(unittest.TestCase): - - def setUp(self) -> None: - self.node = SunglassesEffectNode( - name='sunglasses', - input_buffer='vis', - output_buffer='vis_sunglasses') - - def _get_input_msg(self): - - msg = FrameMessage(None) - - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - h, w = image.shape[:2] - msg.set_image(image) - - objects = [ - dict( - keypoints=np.stack((np.random.rand(17) * - (w - 1), np.random.rand(17) * (h - 1)), - axis=1), - keypoint_scores=np.ones(17), - dataset_meta=parse_pose_metainfo( - Config.fromfile('configs/_base_/datasets/coco.py') - ['dataset_info'])) - ] - msg.update_objects(objects) - - return msg - - def test_process(self): - input_msg = self._get_input_msg() - img_h, img_w = input_msg.get_image().shape[:2] - self.assertEqual(len(input_msg.get_objects()), 1) - - output_msg = self.node.process(dict(input=input_msg)) 
- canvas = output_msg.get_image() - self.assertIsInstance(canvas, np.ndarray) - self.assertEqual(canvas.shape[0], img_h) - self.assertEqual(canvas.shape[1], img_w) - - def test_bypass(self): - input_msg = self._get_input_msg() - img = input_msg.get_image().copy() - output_msg = self.node.bypass(dict(input=input_msg)) - self.assertTrue((img == output_msg.get_image()).all()) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_utils/test_buffer.py b/tests/test_apis/test_webcam/test_utils/test_buffer.py deleted file mode 100644 index 2708433ac1..0000000000 --- a/tests/test_apis/test_webcam/test_utils/test_buffer.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest -from queue import Queue - -from mmpose.apis.webcam.utils.buffer import Buffer, BufferManager - - -class TestBuffer(unittest.TestCase): - - def test_buffer(self): - - buffer = Buffer(maxsize=1) - for i in range(3): - buffer.put_force(i) - item = buffer.get() - self.assertEqual(item, 2) - - -class TestBufferManager(unittest.TestCase): - - def _get_buffer_dict(self): - return dict(example_buffer=Buffer()) - - def test_init(self): - - # test default initialization - buffer_manager = BufferManager() - self.assertIn('_buffers', dir(buffer_manager)) - self.assertIsInstance(buffer_manager._buffers, dict) - - # test initialization with given buffers - buffers = self._get_buffer_dict() - buffer_manager = BufferManager(buffers=buffers) - self.assertIn('_buffers', dir(buffer_manager)) - self.assertIsInstance(buffer_manager._buffers, dict) - self.assertIn('example_buffer', buffer_manager._buffers.keys()) - # test __contains__ - self.assertIn('example_buffer', buffer_manager) - - # test initialization with incorrect buffers - buffers['incorrect_buffer'] = Queue() - with self.assertRaises(ValueError): - buffer_manager = BufferManager(buffers=buffers) - - def test_buffer_operations(self): - buffer_manager = BufferManager() - - # test 
register_buffer - buffer_manager.register_buffer('example_buffer', 1) - self.assertIn('example_buffer', buffer_manager) - self.assertEqual(buffer_manager._buffers['example_buffer'].maxsize, 1) - - # test buffer operations - buffer_manager.put('example_buffer', 0) - item = buffer_manager.get('example_buffer') - self.assertEqual(item, 0) - - buffer_manager.put('example_buffer', 0) - self.assertTrue(buffer_manager.is_full('example_buffer')) - buffer_manager.put_force('example_buffer', 1) - item = buffer_manager.get('example_buffer') - self.assertEqual(item, 1) - self.assertTrue(buffer_manager.is_empty('example_buffer')) - - # test get_info - buffer_info = buffer_manager.get_info() - self.assertIn('example_buffer', buffer_info) - self.assertEqual(buffer_info['example_buffer']['size'], 0) - self.assertEqual(buffer_info['example_buffer']['maxsize'], 1) - - # test get_sub_manager - buffer_manager = buffer_manager.get_sub_manager(['example_buffer']) - self.assertIsInstance(buffer_manager, BufferManager) - self.assertIn('example_buffer', buffer_manager) - self.assertEqual(buffer_manager._buffers['example_buffer'].maxsize, 1) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_utils/test_event.py b/tests/test_apis/test_webcam/test_utils/test_event.py deleted file mode 100644 index 7ff4b234bd..0000000000 --- a/tests/test_apis/test_webcam/test_utils/test_event.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest -from threading import Event - -from mmpose.apis.webcam.utils.event import EventManager - - -class TestEventManager(unittest.TestCase): - - def test_event_manager(self): - event_manager = EventManager() - - # test register_event - event_manager.register_event('example_event') - self.assertIn('example_event', event_manager._events) - self.assertIsInstance(event_manager._events['example_event'], Event) - self.assertFalse(event_manager.is_set('example_event')) - - # test event operations - event_manager.set('q', is_keyboard=True) - self.assertIn('_keyboard_q', event_manager._events) - self.assertTrue(event_manager.is_set('q', is_keyboard=True)) - - flag = event_manager.wait('q', is_keyboard=True) - self.assertTrue(flag) - - event_manager.wait_and_handle('q', is_keyboard=True) - event_manager.clear('q', is_keyboard=True) - self.assertFalse(event_manager._events['_keyboard_q']._flag) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_utils/test_image_capture.py b/tests/test_apis/test_webcam/test_utils/test_image_capture.py deleted file mode 100644 index 8165299b89..0000000000 --- a/tests/test_apis/test_webcam/test_utils/test_image_capture.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest - -import cv2 -import numpy as np - -from mmpose.apis.webcam.utils.image_capture import ImageCapture - - -class TestImageCapture(unittest.TestCase): - - def setUp(self): - self.image_path = 'tests/data/coco/000000000785.jpg' - self.image = cv2.imread(self.image_path) - - def test_init(self): - image_cap = ImageCapture(self.image_path) - self.assertIsInstance(image_cap.image, np.ndarray) - - image_cap = ImageCapture(self.image) - self.assertTrue((self.image == image_cap.image).all()) - - def test_image_capture(self): - image_cap = ImageCapture(self.image_path) - - # test operations - self.assertTrue(image_cap.isOpened()) - - flag, image_ = image_cap.read() - self.assertTrue(flag) - self.assertTrue((self.image == image_).all()) - - image_cap.release() - self.assertIsInstance(image_cap.image, np.ndarray) - - img_h = image_cap.get(cv2.CAP_PROP_FRAME_HEIGHT) - self.assertAlmostEqual(img_h, self.image.shape[0]) - img_w = image_cap.get(cv2.CAP_PROP_FRAME_WIDTH) - self.assertAlmostEqual(img_w, self.image.shape[1]) - fps = image_cap.get(cv2.CAP_PROP_FPS) - self.assertTrue(np.isnan(fps)) - - with self.assertRaises(NotImplementedError): - _ = image_cap.get(-1) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_utils/test_message.py b/tests/test_apis/test_webcam/test_utils/test_message.py deleted file mode 100644 index 536b672e78..0000000000 --- a/tests/test_apis/test_webcam/test_utils/test_message.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest - -import mmcv -import numpy as np - -from mmpose.apis.webcam.nodes import MonitorNode -from mmpose.apis.webcam.utils.message import FrameMessage, Message - - -class TestMessage(unittest.TestCase): - - def _get_monitor_node(self): - return MonitorNode( - name='monitor', input_buffer='_frame_', output_buffer='display') - - def _get_image(self): - image_path = 'tests/data/coco/000000000785.jpg' - image = mmcv.imread(image_path) - return image - - def test_message(self): - msg = Message() - - with self.assertWarnsRegex( - Warning, '`node_name` and `node_type` will be ' - 'overridden if node is provided.'): - node = self._get_monitor_node() - msg.update_route_info(node=node, node_name='monitor') - - route_info = msg.get_route_info() - self.assertEqual(len(route_info), 1) - self.assertEqual(route_info[0]['node'], 'monitor') - - msg.set_route_info([dict(node='recorder', node_type='RecorderNode')]) - msg.merge_route_info(route_info) - route_info = msg.get_route_info() - self.assertEqual(len(route_info), 2) - self.assertEqual(route_info[1]['node'], 'monitor') - - def test_frame_message(self): - msg = FrameMessage(None) - - # test set/get image - self.assertIsInstance(msg.data, dict) - self.assertIsNone(msg.get_image()) - - msg.set_image(self._get_image()) - self.assertIsInstance(msg.get_image(), np.ndarray) - - # test set/get objects - objects = msg.get_objects() - self.assertEqual(len(objects), 0) - - objects = [dict(label='cat'), dict(label='dog')] - msg.update_objects(objects) - dog_objects = msg.get_objects(lambda x: x['label'] == 'dog') - self.assertEqual(len(dog_objects), 1) - - msg.set_objects(objects[:1]) - dog_objects = msg.get_objects(lambda x: x['label'] == 'dog') - self.assertEqual(len(dog_objects), 0) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_utils/test_misc.py b/tests/test_apis/test_webcam/test_utils/test_misc.py deleted file mode 100644 index d60fdaa002..0000000000 --- 
a/tests/test_apis/test_webcam/test_utils/test_misc.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import os -import tempfile -import unittest - -import mmcv -import numpy as np - -from mmpose.apis.webcam.utils.misc import (copy_and_paste, expand_and_clamp, - get_cached_file_path, - get_config_path, is_image_file, - screen_matting) - - -class TestMISC(unittest.TestCase): - - def test_get_cached_file_path(self): - url = 'https://user-images.githubusercontent.com/15977946/' \ - '170850839-acc59e26-c6b3-48c9-a9ec-87556edb99ed.jpg' - with tempfile.TemporaryDirectory() as tmpdir: - cached_file = get_cached_file_path( - url, save_dir=tmpdir, file_name='sunglasses.jpg') - self.assertTrue(os.path.exists(cached_file)) - # check if image is successfully cached - img = mmcv.imread(cached_file) - self.assertIsNotNone(img) - - def test_get_config_path(self): - cfg_path = 'configs/_base_/datasets/coco.py' - path_in_module = get_config_path(cfg_path, 'mmpose') - self.assertEqual(cfg_path, path_in_module) - - cfg_path = '_base_/datasets/coco.py' - with self.assertRaises(FileNotFoundError): - _ = get_config_path(cfg_path, 'mmpose') - - def test_is_image_file(self): - self.assertTrue(is_image_file('example.png')) - self.assertFalse(is_image_file('example.mp4')) - - def test_expand_and_clamp(self): - img_shape = [125, 125, 3] - bbox = [0, 0, 40, 40] # [x1, y1, x2, y2] - - expanded_bbox = expand_and_clamp(bbox, img_shape) - self.assertListEqual(expanded_bbox, [0, 0, 45, 45]) - - def test_screen_matting(self): - img = np.random.randint(0, 256, size=(100, 100, 3)) - - # test with supported colors - for color in 'gbkw': - img_mat = screen_matting(img, color=color) - self.assertEqual(len(img_mat.shape), 2) - self.assertTupleEqual(img_mat.shape, img.shape[:2]) - - # test with unsupported arguments - with self.assertRaises(ValueError): - screen_matting(img) - - with self.assertRaises(NotImplementedError): - screen_matting(img, color='r') - - def 
test_copy_and_paste(self): - img = np.random.randint(0, 256, size=(50, 50, 3)) - background_img = np.random.randint(0, 256, size=(200, 200, 3)) - mask = screen_matting(background_img, color='b') - - output_img = copy_and_paste(img, background_img, mask) - self.assertTupleEqual(output_img.shape, background_img.shape) diff --git a/tests/test_apis/test_webcam/test_utils/test_pose.py b/tests/test_apis/test_webcam/test_utils/test_pose.py deleted file mode 100644 index 06f4fc0e41..0000000000 --- a/tests/test_apis/test_webcam/test_utils/test_pose.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. -import unittest - -from mmengine import Config - -from mmpose.apis.webcam.utils.pose import (get_eye_keypoint_ids, - get_face_keypoint_ids, - get_hand_keypoint_ids, - get_mouth_keypoint_ids, - get_wrist_keypoint_ids) -from mmpose.datasets.datasets.utils import parse_pose_metainfo - - -class TestGetKeypointIds(unittest.TestCase): - - def setUp(self) -> None: - datasets_meta = dict( - coco=Config.fromfile('configs/_base_/datasets/coco.py'), - coco_wholebody=Config.fromfile( - 'configs/_base_/datasets/coco_wholebody.py'), - animalpose=Config.fromfile( - 'configs/_base_/datasets/animalpose.py'), - ap10k=Config.fromfile('configs/_base_/datasets/ap10k.py'), - wflw=Config.fromfile('configs/_base_/datasets/wflw.py'), - ) - self.datasets_meta = { - key: parse_pose_metainfo(value['dataset_info']) - for key, value in datasets_meta.items() - } - - def test_get_eye_keypoint_ids(self): - - # coco dataset - coco_dataset_meta = self.datasets_meta['coco'].copy() - left_eye_idx, right_eye_idx = get_eye_keypoint_ids(coco_dataset_meta) - self.assertEqual(left_eye_idx, 1) - self.assertEqual(right_eye_idx, 2) - - del coco_dataset_meta['keypoint_name2id']['left_eye'] - left_eye_idx, right_eye_idx = get_eye_keypoint_ids(coco_dataset_meta) - self.assertEqual(left_eye_idx, 1) - self.assertEqual(right_eye_idx, 2) - - # animalpose dataset - animalpose_dataset_meta = 
self.datasets_meta['animalpose'].copy() - left_eye_idx, right_eye_idx = get_eye_keypoint_ids( - animalpose_dataset_meta) - self.assertEqual(left_eye_idx, 0) - self.assertEqual(right_eye_idx, 1) - - # dataset without keys `'left_eye'` or `'right_eye'` - wflw_dataset_meta = self.datasets_meta['wflw'].copy() - with self.assertRaises(ValueError): - _ = get_eye_keypoint_ids(wflw_dataset_meta) - - def test_get_face_keypoint_ids(self): - - # coco_wholebody dataset - wholebody_dataset_meta = self.datasets_meta['coco_wholebody'].copy() - face_indices = get_face_keypoint_ids(wholebody_dataset_meta) - for i, ind in enumerate(range(23, 91)): - self.assertEqual(face_indices[i], ind) - - del wholebody_dataset_meta['keypoint_name2id']['face-0'] - face_indices = get_face_keypoint_ids(wholebody_dataset_meta) - for i, ind in enumerate(range(23, 91)): - self.assertEqual(face_indices[i], ind) - - # dataset without keys `'face-x'` - wflw_dataset_meta = self.datasets_meta['wflw'].copy() - with self.assertRaises(ValueError): - _ = get_face_keypoint_ids(wflw_dataset_meta) - - def test_get_wrist_keypoint_ids(self): - - # coco dataset - coco_dataset_meta = self.datasets_meta['coco'].copy() - left_wrist_idx, right_wrist_idx = get_wrist_keypoint_ids( - coco_dataset_meta) - self.assertEqual(left_wrist_idx, 9) - self.assertEqual(right_wrist_idx, 10) - - del coco_dataset_meta['keypoint_name2id']['left_wrist'] - left_wrist_idx, right_wrist_idx = get_wrist_keypoint_ids( - coco_dataset_meta) - self.assertEqual(left_wrist_idx, 9) - self.assertEqual(right_wrist_idx, 10) - - # animalpose dataset - animalpose_dataset_meta = self.datasets_meta['animalpose'].copy() - left_wrist_idx, right_wrist_idx = get_wrist_keypoint_ids( - animalpose_dataset_meta) - self.assertEqual(left_wrist_idx, 16) - self.assertEqual(right_wrist_idx, 17) - - # ap10k - ap10k_dataset_meta = self.datasets_meta['ap10k'].copy() - left_wrist_idx, right_wrist_idx = get_wrist_keypoint_ids( - ap10k_dataset_meta) - 
self.assertEqual(left_wrist_idx, 7) - self.assertEqual(right_wrist_idx, 10) - - # dataset without keys `'left_wrist'` or `'right_wrist'` - wflw_dataset_meta = self.datasets_meta['wflw'].copy() - with self.assertRaises(ValueError): - _ = get_wrist_keypoint_ids(wflw_dataset_meta) - - def test_get_mouth_keypoint_ids(self): - - # coco_wholebody dataset - wholebody_dataset_meta = self.datasets_meta['coco_wholebody'].copy() - mouth_index = get_mouth_keypoint_ids(wholebody_dataset_meta) - self.assertEqual(mouth_index, 85) - - del wholebody_dataset_meta['keypoint_name2id']['face-62'] - mouth_index = get_mouth_keypoint_ids(wholebody_dataset_meta) - self.assertEqual(mouth_index, 85) - - # dataset without keys `'face-62'` - wflw_dataset_meta = self.datasets_meta['wflw'].copy() - with self.assertRaises(ValueError): - _ = get_mouth_keypoint_ids(wflw_dataset_meta) - - def test_get_hand_keypoint_ids(self): - - # coco_wholebody dataset - wholebody_dataset_meta = self.datasets_meta['coco_wholebody'].copy() - hand_indices = get_hand_keypoint_ids(wholebody_dataset_meta) - for i, ind in enumerate(range(91, 133)): - self.assertEqual(hand_indices[i], ind) - - del wholebody_dataset_meta['keypoint_name2id']['left_hand_root'] - hand_indices = get_hand_keypoint_ids(wholebody_dataset_meta) - for i, ind in enumerate(range(91, 133)): - self.assertEqual(hand_indices[i], ind) - - # dataset without hand keys - wflw_dataset_meta = self.datasets_meta['wflw'].copy() - with self.assertRaises(ValueError): - _ = get_hand_keypoint_ids(wflw_dataset_meta) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_apis/test_webcam/test_webcam_executor.py b/tests/test_apis/test_webcam/test_webcam_executor.py deleted file mode 100644 index 0436308869..0000000000 --- a/tests/test_apis/test_webcam/test_webcam_executor.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) OpenMMLab. All rights reserved. 
-import unittest - -from mmengine import Config - -from mmpose.apis.webcam import WebcamExecutor - - -class TestWebcamExecutor(unittest.TestCase): - - def setUp(self) -> None: - config = Config.fromfile('demo/webcam_cfg/test_camera.py').executor_cfg - config.camera_id = 'tests/data/posetrack18/videos/' \ - '000001_mpiinew_test/000001_mpiinew_test.mp4' - self.executor = WebcamExecutor(**config) - - def test_init(self): - - self.assertEqual(len(self.executor.node_list), 2) - self.assertEqual(self.executor.node_list[0].name, 'monitor') - self.assertEqual(self.executor.node_list[1].name, 'recorder') - - -if __name__ == '__main__': - unittest.main()