From 9169f7dd6942b59fc6bbdd3b691ef1490961bf9c Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Thu, 1 Aug 2019 17:39:23 +0100 Subject: [PATCH 01/65] Start of parsing of xml files for transfer learning --- .../d04_modelling/transfer/data_loader.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 src/traffic_analysis/d04_modelling/transfer/data_loader.py diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py new file mode 100644 index 0000000..68130d1 --- /dev/null +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -0,0 +1,45 @@ + +from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 + +from enum import Enum +class TransferDataset(Enum): + detrac = 1 + cvat = 2 + + +class DataLoader(object): + + def __init__(self, datasets, creds, paths): + self.datasets = datasets + self.creds = creds + self.paths = paths + self.parse_mapping = {TransferDataset.detrac: self.parse_detrac_data, + TransferDataset.cvat: self.parse_cvat_data} + self.data_loader_s3 = DataLoaderS3(s3_credentials=, bucket_name=) + + return + + + def get_train_and_test(self, train_fraction): + + for dataset in self.datasets: + self.parse_mapping[dataset]() + + + return + + def parse_detrac_data(self): + + print('Parsing detrac dataset...') + + return + + def parse_cvat_data(self): + + print('Parsing cvat dataset...') + + return + + + +dl = DataLoader(datasets=[], creds=creds, paths=paths) From 7ea9cfa0cd0b4a44b2be6b584ecf452d0ce1b9de Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Fri, 2 Aug 2019 15:32:09 +0100 Subject: [PATCH 02/65] More parsing --- conf/base/paths.yml | 2 + .../d04_modelling/transfer/MVI_20012.xml | 44044 ++++++++++++++++ .../d04_modelling/transfer/data_loader.py | 34 +- .../d04_modelling/transfer/example.py | 100 + 4 files changed, 44178 insertions(+), 2 deletions(-) create mode 100644 src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml create 
mode 100644 src/traffic_analysis/d04_modelling/transfer/example.py diff --git a/conf/base/paths.yml b/conf/base/paths.yml index 7f6aa2f..323f5d9 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -9,12 +9,14 @@ s3_paths: s3_profile: "dssg" s3_creds: "dev_s3" s3_detect_model: "ref/model_conf/" + s3_detrac_annotations: "ref/annotations/detrac/" local_paths: temp_video: "data/temp/videos/" temp_raw_video: "data/temp/raw_videos/" temp_frame_level: "data/temp/frame_level/" temp_video_level: "data/temp/video_level/" + temp_annotation: "data/temp/annotation/" video_names: "data/ref/video_names/" processed_video: "results/jamcams/" plots: "plots/" diff --git a/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml b/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml new file mode 100644 index 0000000..f73fdd9 --- /dev/null +++ b/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml @@ -0,0 +1,44044 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py index 68130d1..86f7b7a 100644 --- a/src/traffic_analysis/d04_modelling/transfer/data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -1,6 +1,9 @@ +import xml.etree.ElementTree as ET +from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 + from enum import Enum class TransferDataset(Enum): detrac = 1 @@ -15,7 +18,9 @@ def __init__(self, datasets, creds, paths): self.paths = paths self.parse_mapping = {TransferDataset.detrac: self.parse_detrac_data, TransferDataset.cvat: self.parse_cvat_data} - self.data_loader_s3 = DataLoaderS3(s3_credentials=, bucket_name=) + + self.data_loader_s3 = DataLoaderS3(s3_credentials=creds[paths['s3_creds']], + bucket_name=paths['bucket_name']) return @@ -32,6 
+37,28 @@ def parse_detrac_data(self): print('Parsing detrac dataset...') + xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_detrac_annotations']) + for xml_file in xml_files: + self.parse_detrac_xml_file(xml_file) + + return + + def parse_detrac_xml_file(self, xml_file): + + path = self.paths['temp_annotation'] + xml_file.split('/')[-1] + + self.data_loader_s3.download_file(path_of_file_to_download=xml_file, + path_to_download_file_to=path) + + tree = ET.parse(xml_file.split('/')[-1]) + img_name = path.split('/')[-1][:-4] + + height = tree.findtext("./size/height") + width = tree.findtext("./size/width") + + objects = [img_name, width, height] + print(objects) + return def parse_cvat_data(self): @@ -41,5 +68,8 @@ def parse_cvat_data(self): return +paths = load_paths() +creds = load_credentials() -dl = DataLoader(datasets=[], creds=creds, paths=paths) +dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) +dl.get_train_and_test(.8) diff --git a/src/traffic_analysis/d04_modelling/transfer/example.py b/src/traffic_analysis/d04_modelling/transfer/example.py new file mode 100644 index 0000000..5f075ec --- /dev/null +++ b/src/traffic_analysis/d04_modelling/transfer/example.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +import xml.etree.ElementTree as ET +import os + +names_dict = {} +cnt = 0 +f = open('./voc_names.txt', 'r').readlines() +for line in f: + line = line.strip() + names_dict[line] = cnt + cnt += 1 + +voc_07 = '/data/VOCdevkit/VOC2007' +voc_12 = '/data/VOCdevkit/VOC2012' + +anno_path = [os.path.join(voc_07, 'Annotations'), os.path.join(voc_12, 'Annotations')] +img_path = [os.path.join(voc_07, 'JPEGImages'), os.path.join(voc_12, 'JPEGImages')] + +trainval_path = [os.path.join(voc_07, 'ImageSets/Main/trainval.txt'), + os.path.join(voc_12, 'ImageSets/Main/trainval.txt')] +test_path = [os.path.join(voc_07, 'ImageSets/Main/test.txt')] + + +def parse_xml(path): + tree = ET.parse(path) + img_name = path.split('/')[-1][:-4] + + 
height = tree.findtext("./size/height") + width = tree.findtext("./size/width") + + objects = [img_name, width, height] + + for obj in tree.findall('object'): + difficult = obj.find('difficult').text + if difficult == '1': + continue + name = obj.find('name').text + bbox = obj.find('bndbox') + xmin = bbox.find('xmin').text + ymin = bbox.find('ymin').text + xmax = bbox.find('xmax').text + ymax = bbox.find('ymax').text + + name = str(names_dict[name]) + objects.extend([name, xmin, ymin, xmax, ymax]) + if len(objects) > 1: + return objects + else: + return None + + +test_cnt = 0 + + +def gen_test_txt(txt_path): + global test_cnt + f = open(txt_path, 'w') + + for i, path in enumerate(test_path): + img_names = open(path, 'r').readlines() + for img_name in img_names: + img_name = img_name.strip() + xml_path = anno_path[i] + '/' + img_name + '.xml' + objects = parse_xml(xml_path) + if objects: + objects[0] = img_path[i] + '/' + img_name + '.jpg' + if os.path.exists(objects[0]): + objects.insert(0, str(test_cnt)) + test_cnt += 1 + objects = ' '.join(objects) + '\n' + f.write(objects) + f.close() + + +train_cnt = 0 + + +def gen_train_txt(txt_path): + global train_cnt + f = open(txt_path, 'w') + + for i, path in enumerate(trainval_path): + img_names = open(path, 'r').readlines() + for img_name in img_names: + img_name = img_name.strip() + xml_path = anno_path[i] + '/' + img_name + '.xml' + objects = parse_xml(xml_path) + if objects: + objects[0] = img_path[i] + '/' + img_name + '.jpg' + if os.path.exists(objects[0]): + objects.insert(0, str(train_cnt)) + train_cnt += 1 + objects = ' '.join(objects) + '\n' + f.write(objects) + f.close() + + +gen_train_txt('train.txt') +gen_test_txt('val.txt') \ No newline at end of file From cb8ed6da38a831cbdb78c717735479d3fa2b8402 Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Mon, 5 Aug 2019 12:20:31 +0100 Subject: [PATCH 03/65] Deletion of xml file --- .../d04_modelling/transfer/MVI_20012.xml | 44044 ---------------- 1 file changed, 
44044 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml diff --git a/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml b/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml deleted file mode 100644 index f73fdd9..0000000 --- a/src/traffic_analysis/d04_modelling/transfer/MVI_20012.xml +++ /dev/null @@ -1,44044 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file From 8b34af2c69483890fbf272f5f7120fcf94bdfb68 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 6 Aug 2019 15:39:33 +0100 Subject: [PATCH 04/65] updated checking of checkpoint fn --- .../d04_modelling/perform_detection_tensorflow.py | 2 +- src/traffic_analysis/d04_modelling/test_detection.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py index 44e9dad..c533113 100644 --- a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py +++ b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py @@ -28,7 +28,7 @@ def detect_objects_in_image(image_capture, params, paths, s3_credentials): """ detection_model = params['detection_model'] - local_filepath_model = os.path.join(paths['local_detection_model'], detection_model, 'yolov3.ckpt') + local_filepath_model = os.path.join(paths['local_detection_model'], detection_model, 'checkpoint') if detection_model == 'yolov3_tf': # only use with yolov3_tf as detection model if not os.path.exists(local_filepath_model): # create yolov3 tensorflow model on local if does not exist diff --git a/src/traffic_analysis/d04_modelling/test_detection.py b/src/traffic_analysis/d04_modelling/test_detection.py index 88d493a..f12991d 100644 --- a/src/traffic_analysis/d04_modelling/test_detection.py +++ b/src/traffic_analysis/d04_modelling/test_detection.py @@ -35,8 +35,6 @@ def 
test_detection(image_path): end_time = time.time() delt_time = end_time - start_time - print(delt_time) - print(bbox, label, confidence) -test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') -# test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') +# test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') +test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') From 6ec44bd165f1cb6080f36aeb8f9ba9a5d1dea823 Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Tue, 6 Aug 2019 17:21:35 +0100 Subject: [PATCH 05/65] Dealing with empty xml files --- conf/base/paths.yml | 2 +- .../d04_modelling/transfer/MVI_20011.xml | 37925 ++++++++++++++++ .../d04_modelling/transfer/books.xml | 120 + .../d04_modelling/transfer/data_loader.py | 142 +- 4 files changed, 38171 insertions(+), 18 deletions(-) create mode 100644 src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml create mode 100644 src/traffic_analysis/d04_modelling/transfer/books.xml diff --git a/conf/base/paths.yml b/conf/base/paths.yml index 323f5d9..9360117 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -2,7 +2,7 @@ s3_paths: bucket_name: "air-pollution-uk" #s3 bucket name s3_video: "raw/videos/" #path to video data in s3 bucket - s3_annotations: "ref/annotations/" + s3_cvat_annotations: "ref/annotations/cvat/" s3_video_names: "ref/video_names/" s3_camera_details: "ref/camera_details/camera_details.json" s3_frame_level: "frame_level/" diff --git a/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml b/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml new file mode 100644 index 0000000..51c951c --- /dev/null +++ b/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml @@ -0,0 +1,37925 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer/books.xml b/src/traffic_analysis/d04_modelling/transfer/books.xml new file mode 100644 index 0000000..7c254f1 --- /dev/null +++ b/src/traffic_analysis/d04_modelling/transfer/books.xml @@ -0,0 +1,120 
@@ + + + + Gambardella, Matthew + XML Developer's Guide + Computer + 44.95 + 2000-10-01 + An in-depth look at creating applications + with XML. + + + Ralls, Kim + Midnight Rain + Fantasy + 5.95 + 2000-12-16 + A former architect battles corporate zombies, + an evil sorceress, and her own childhood to become queen + of the world. + + + Corets, Eva + Maeve Ascendant + Fantasy + 5.95 + 2000-11-17 + After the collapse of a nanotechnology + society in England, the young survivors lay the + foundation for a new society. + + + Corets, Eva + Oberon's Legacy + Fantasy + 5.95 + 2001-03-10 + In post-apocalypse England, the mysterious + agent known only as Oberon helps to create a new life + for the inhabitants of London. Sequel to Maeve + Ascendant. + + + Corets, Eva + The Sundered Grail + Fantasy + 5.95 + 2001-09-10 + The two daughters of Maeve, half-sisters, + battle one another for control of England. Sequel to + Oberon's Legacy. + + + Randall, Cynthia + Lover Birds + Romance + 4.95 + 2000-09-02 + When Carla meets Paul at an ornithology + conference, tempers fly as feathers get ruffled. + + + Thurman, Paula + Splish Splash + Romance + 4.95 + 2000-11-02 + A deep sea diver finds true love twenty + thousand leagues beneath the sea. + + + Knorr, Stefan + Creepy Crawlies + Horror + 4.95 + 2000-12-06 + An anthology of horror stories about roaches, + centipedes, scorpions and other insects. + + + Kress, Peter + Paradox Lost + Science Fiction + 6.95 + 2000-11-02 + After an inadvertant trip through a Heisenberg + Uncertainty Device, James Salway discovers the problems + of being quantum. + + + O'Brien, Tim + Microsoft .NET: The Programming Bible + Computer + 36.95 + 2000-12-09 + Microsoft's .NET initiative is explored in + detail in this deep programmer's reference. + + + O'Brien, Tim + MSXML3: A Comprehensive Guide + Computer + 36.95 + 2000-12-01 + The Microsoft MSXML3 parser is covered in + detail, with attention to XML DOM interfaces, XSLT processing, + SAX and more. 
+ + + Galos, Mike + Visual Studio 7: A Comprehensive Guide + Computer + 49.95 + 2001-04-16 + Microsoft Visual Studio 7 is explored in depth, + looking at how Visual Basic, Visual C++, C#, and ASP+ are + integrated into a comprehensive development + environment. + + diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py index 86f7b7a..f041119 100644 --- a/src/traffic_analysis/d04_modelling/transfer/data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -2,6 +2,7 @@ from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 +from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir from enum import Enum @@ -27,49 +28,156 @@ def __init__(self, datasets, creds, paths): def get_train_and_test(self, train_fraction): + results = [] + delete_and_recreate_dir(self.paths['temp_annotation']) + for dataset in self.datasets: - self.parse_mapping[dataset]() + results += self.parse_mapping[dataset]() + delete_and_recreate_dir(self.paths['temp_annotation']) return def parse_detrac_data(self): print('Parsing detrac dataset...') - + results = [] xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_detrac_annotations']) for xml_file in xml_files: - self.parse_detrac_xml_file(xml_file) + result = self.parse_detrac_xml_file(xml_file) + if (result): + results += result - return + return results def parse_detrac_xml_file(self, xml_file): path = self.paths['temp_annotation'] + xml_file.split('/')[-1] - self.data_loader_s3.download_file(path_of_file_to_download=xml_file, - path_to_download_file_to=path) + try: + self.data_loader_s3.download_file(path_of_file_to_download=xml_file, + path_to_download_file_to=path) + except: + print("Could not download file " + xml_file) + + root = ET.parse(path).getroot() + + results = [] + # [image_index + # image_path + # image_width 
+ # image_height + # label_index, + # x_min, + # y_min, + # x_max, + # y_max] + + im_path = path.split('/')[-1][:-4] + im_width = 250 + im_height = 250 + + for track in root.iter('frame'): + + result = str(track.attrib['num']) + \ + ' ' + str(im_path) + \ + ' ' + str(im_width) + \ + ' ' + str(im_height) + + for frame_obj in track.iter('target'): + vehicle_type = frame_obj.find('attribute').attrib['vehicle_type'] + + left = float(frame_obj.find('box').attrib['left']) + top = float(frame_obj.find('box').attrib['top']) + width = float(frame_obj.find('box').attrib['width']) + height = float(frame_obj.find('box').attrib['height']) + + x_min = left + y_min = top - height + x_max = left + width + y_max = top + + result += ' ' + str(vehicle_type) + \ + ' ' + str(x_min) + \ + ' ' + str(y_min) + \ + ' ' + str(x_max) + \ + ' ' + str(y_max) + + results.append(result) + + if len(results) > 1: + return results + else: + return None - tree = ET.parse(xml_file.split('/')[-1]) - img_name = path.split('/')[-1][:-4] + def parse_cvat_data(self): - height = tree.findtext("./size/height") - width = tree.findtext("./size/width") + print('Parsing cvat dataset...') + results = [] + xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_cvat_annotations']) + for xml_file in xml_files: + result = self.parse_cvat_xml_file(xml_file) + if(result): + results += result - objects = [img_name, width, height] - print(objects) + return results - return + def parse_cvat_xml_file(self, xml_file): - def parse_cvat_data(self): + path = self.paths['temp_annotation'] + xml_file.split('/')[-1] - print('Parsing cvat dataset...') + try: + self.data_loader_s3.download_file(path_of_file_to_download=xml_file, + path_to_download_file_to=path) + except: + print("Could not download file " + xml_file) - return + root = ET.parse(path).getroot() + + results = [] + + im_path = path.split('/')[-1][:-4] + im_width = 250 + im_height = 250 + + frame_dict = {} + + for track in root.iter('track'): + if 
track.attrib['label'] == 'vehicle': + for frame in track.iter('box'): + frame_num = frame.attrib['frame'] + if(frame_num not in frame_dict): + frame_dict[frame_num] = str(frame_num) + ' ' + \ + str(im_path) + ' ' + \ + str(im_width) + ' ' + \ + str(im_height) + + vehicle_type = frame.findall('attribute')[2].text + x_min = float(frame.attrib['xtl']) + y_min = float(frame.attrib['ybr']) + x_max = float(frame.attrib['xbr']) + y_max = float(frame.attrib['ytl']) + + frame_dict[frame_num] += ' ' + str(vehicle_type) + \ + ' ' + str(x_min) + \ + ' ' + str(y_min) + \ + ' ' + str(x_max) + \ + ' ' + str(y_max) + + results = [] + for key in frame_dict: + results.append(frame_dict[key]) + + if len(results) > 1: + return results + else: + return None paths = load_paths() creds = load_credentials() -dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) +dl = DataLoader(datasets=[TransferDataset.detrac, TransferDataset.cvat], creds=creds, paths=paths) dl.get_train_and_test(.8) + +#TODO how should I actually interpret the dimensions? 
From 688e3b01363346789a7373fd04456c41bc494b40 Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Wed, 7 Aug 2019 11:51:23 +0100 Subject: [PATCH 06/65] More parsing of both the input and output data for transfer learning --- conf/base/paths.yml | 4 +- .../d04_modelling/transfer/MVI_20011.xml | 37925 ---------------- .../d04_modelling/transfer/books.xml | 120 - .../d04_modelling/transfer/data_loader.py | 104 +- 4 files changed, 89 insertions(+), 38064 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml delete mode 100644 src/traffic_analysis/d04_modelling/transfer/books.xml diff --git a/conf/base/paths.yml b/conf/base/paths.yml index 9360117..29add00 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -2,17 +2,19 @@ s3_paths: bucket_name: "air-pollution-uk" #s3 bucket name s3_video: "raw/videos/" #path to video data in s3 bucket - s3_cvat_annotations: "ref/annotations/cvat/" s3_video_names: "ref/video_names/" s3_camera_details: "ref/camera_details/camera_details.json" s3_frame_level: "frame_level/" s3_profile: "dssg" s3_creds: "dev_s3" s3_detect_model: "ref/model_conf/" + s3_cvat_annotations: "ref/annotations/cvat/" s3_detrac_annotations: "ref/annotations/detrac/" + s3_detrac_images: "raw/images/detrac/" local_paths: temp_video: "data/temp/videos/" + temp_raw_images: "data/temp/raw_images/" temp_raw_video: "data/temp/raw_videos/" temp_frame_level: "data/temp/frame_level/" temp_video_level: "data/temp/video_level/" diff --git a/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml b/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml deleted file mode 100644 index 51c951c..0000000 --- a/src/traffic_analysis/d04_modelling/transfer/MVI_20011.xml +++ /dev/null @@ -1,37925 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer/books.xml b/src/traffic_analysis/d04_modelling/transfer/books.xml deleted file mode 100644 index 7c254f1..0000000 --- a/src/traffic_analysis/d04_modelling/transfer/books.xml +++ /dev/null @@ -1,120 +0,0 @@ - - - - Gambardella, Matthew - XML Developer's Guide - Computer - 44.95 - 2000-10-01 - An in-depth 
look at creating applications - with XML. - - - Ralls, Kim - Midnight Rain - Fantasy - 5.95 - 2000-12-16 - A former architect battles corporate zombies, - an evil sorceress, and her own childhood to become queen - of the world. - - - Corets, Eva - Maeve Ascendant - Fantasy - 5.95 - 2000-11-17 - After the collapse of a nanotechnology - society in England, the young survivors lay the - foundation for a new society. - - - Corets, Eva - Oberon's Legacy - Fantasy - 5.95 - 2001-03-10 - In post-apocalypse England, the mysterious - agent known only as Oberon helps to create a new life - for the inhabitants of London. Sequel to Maeve - Ascendant. - - - Corets, Eva - The Sundered Grail - Fantasy - 5.95 - 2001-09-10 - The two daughters of Maeve, half-sisters, - battle one another for control of England. Sequel to - Oberon's Legacy. - - - Randall, Cynthia - Lover Birds - Romance - 4.95 - 2000-09-02 - When Carla meets Paul at an ornithology - conference, tempers fly as feathers get ruffled. - - - Thurman, Paula - Splish Splash - Romance - 4.95 - 2000-11-02 - A deep sea diver finds true love twenty - thousand leagues beneath the sea. - - - Knorr, Stefan - Creepy Crawlies - Horror - 4.95 - 2000-12-06 - An anthology of horror stories about roaches, - centipedes, scorpions and other insects. - - - Kress, Peter - Paradox Lost - Science Fiction - 6.95 - 2000-11-02 - After an inadvertant trip through a Heisenberg - Uncertainty Device, James Salway discovers the problems - of being quantum. - - - O'Brien, Tim - Microsoft .NET: The Programming Bible - Computer - 36.95 - 2000-12-09 - Microsoft's .NET initiative is explored in - detail in this deep programmer's reference. - - - O'Brien, Tim - MSXML3: A Comprehensive Guide - Computer - 36.95 - 2000-12-01 - The Microsoft MSXML3 parser is covered in - detail, with attention to XML DOM interfaces, XSLT processing, - SAX and more. 
- - - Galos, Mike - Visual Studio 7: A Comprehensive Guide - Computer - 49.95 - 2001-04-16 - Microsoft Visual Studio 7 is explored in depth, - looking at how Visual Basic, Visual C++, C#, and ASP+ are - integrated into a comprehensive development - environment. - - diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py index f041119..ea04708 100644 --- a/src/traffic_analysis/d04_modelling/transfer/data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -1,4 +1,7 @@ import xml.etree.ElementTree as ET +from random import shuffle +from PIL import Image +import numpy as np from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 @@ -17,8 +20,8 @@ def __init__(self, datasets, creds, paths): self.datasets = datasets self.creds = creds self.paths = paths - self.parse_mapping = {TransferDataset.detrac: self.parse_detrac_data, - TransferDataset.cvat: self.parse_cvat_data} + self.load_mapping = {TransferDataset.detrac: self.load_detrac_data, + TransferDataset.cvat: self.load_cvat_data} self.data_loader_s3 = DataLoaderS3(s3_credentials=creds[paths['s3_creds']], bucket_name=paths['bucket_name']) @@ -28,27 +31,71 @@ def __init__(self, datasets, creds, paths): def get_train_and_test(self, train_fraction): - results = [] delete_and_recreate_dir(self.paths['temp_annotation']) + delete_and_recreate_dir(self.paths['temp_raw_images']) + x, y = self.load_data_from_s3() + delete_and_recreate_dir(self.paths['temp_annotation']) + delete_and_recreate_dir(self.paths['temp_raw_images']) + + """ + results = shuffle(results) + train = results[:int(train_fraction * len(results))] + test = results[int(train_fraction * len(results)):] + """ + + return #train_x, train_y, test_x, test_y + + def load_data_from_s3(self): + + xs = [] + ys = [] for dataset in self.datasets: - results += 
self.parse_mapping[dataset]() + x, y = self.load_mapping[dataset]() + xs += x + ys += y - delete_and_recreate_dir(self.paths['temp_annotation']) + return x, y - return + def load_inputs_from_s3(self, test_y, train_y): - def parse_detrac_data(self): + test_x = [] + train_x = [] - print('Parsing detrac dataset...') - results = [] + return test_x, train_x + + def load_detrac_data(self): + + print('Parsing detrac xmls...') + y = [] xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_detrac_annotations']) for xml_file in xml_files: result = self.parse_detrac_xml_file(xml_file) if (result): - results += result - - return results + y += result + + print('Loading detrac images...') + x = [] + for labels in y: + print(labels) + image_num = labels.split(' ')[0].zfill(5) + folder = labels.split(' ')[1] + file_to_download = paths['s3_detrac_images'] + \ + folder + '/' + \ + 'img' + image_num + '.jpg' + download_file_to = paths['temp_raw_images'] + \ + folder + '_' + \ + image_num + '.jpg' + + self.data_loader_s3.download_file( + path_of_file_to_download=file_to_download, + path_to_download_file_to=download_file_to) + + img = Image.open(download_file_to) + img.load() + x.append(np.asarray(img, dtype="int32")) + + return x, y def parse_detrac_xml_file(self, xml_file): @@ -110,17 +157,38 @@ def parse_detrac_xml_file(self, xml_file): else: return None - def parse_cvat_data(self): + def load_cvat_data(self): - print('Parsing cvat dataset...') - results = [] + print('Parsing cvat xmls...') + y = [] xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_cvat_annotations']) for xml_file in xml_files: result = self.parse_cvat_xml_file(xml_file) if(result): - results += result - - return results + y += result + + print('Loading cvat videos...') + x = [] + for labels in y: + print(labels) + image_num = labels.split(' ')[0].zfill(5) + folder = labels.split(' ')[1] + file_to_download = paths['s3_detrac_images'] + \ + folder + '/' + \ + 'img' + image_num + '.jpg' 
+ download_file_to = paths['temp_raw_images'] + \ + folder + '_' + \ + image_num + '.jpg' + + self.data_loader_s3.download_file( + path_of_file_to_download=file_to_download, + path_to_download_file_to=download_file_to) + + img = Image.open(download_file_to) + img.load() + x.append(np.asarray(img, dtype="int32")) + + return x, y def parse_cvat_xml_file(self, xml_file): From 9e1c1c435b06485e6a1ff0d82711980d39839c9e Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Wed, 7 Aug 2019 12:19:54 +0100 Subject: [PATCH 07/65] benchmarking for performing object detection --- .../perform_detection_tensorflow.py | 23 +++++++++++++++++-- .../d04_modelling/test_detection.py | 4 ++-- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py index c533113..2fb6328 100644 --- a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py +++ b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py @@ -6,6 +6,7 @@ import cv2 import os import numpy as np +import time from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names, \ remove_overlapping_boxes, letterbox_resize @@ -55,7 +56,7 @@ def pass_image_through_nn(image_capture, paths, params): labels (list(str)): list of detection labels confs (list(float)): list of detection scores """ - + time_0 = time.time() conf_thresh = params['detection_confidence_threshold'] iou_thresh = params['detection_iou_threshold'] detection_model = params['detection_model'] @@ -67,29 +68,47 @@ def pass_image_through_nn(image_capture, paths, params): class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') classes = read_class_names(class_name_path) n_classes = len(classes) + time_1 = time.time() with tf.Session() as sess: # initialize tensorflow yolov3 model + time_2 = time.time() init_data = tf.placeholder(tf.float32, [1, 
416, 416, 3], name='init_data') yolo_model = YoloV3(n_classes, anchors) with tf.variable_scope('YoloV3'): feature_map = yolo_model.forward(init_data, False) + time_3 = time.time() pred_boxes, pred_confs, pred_probs = yolo_model.predict(feature_map) pred_scores = pred_confs * pred_probs + time_4 = time.time() boxes, scores, labels = remove_overlapping_boxes(pred_boxes, pred_scores, n_classes, max_boxes=200, score_thresh=conf_thresh, nms_thresh=iou_thresh) + time_5 = time.time() saver = tf.train.Saver() + time_6 = time.time() saver.restore(sess, local_filepath_model) + time_7 = time.time() boxes_unscaled, scores_out, labels_out = sess.run([boxes, scores, labels], feed_dict={init_data: image_array}) + time_8 = time.time() # rescale the coordinates to the original image boxes_out = rescale_boxes(boxes_unscaled, formatting_params) - + time_9 = time.time() + + print(time_1 - time_0) + print(time_2 - time_1) + print(time_3 - time_2) + print(time_4 - time_3) + print(time_5 - time_4) + print(time_6 - time_5) + print(time_7 - time_6) + print(time_8 - time_7) + print(time_9 - time_8) return boxes_out, labels_out, scores_out diff --git a/src/traffic_analysis/d04_modelling/test_detection.py b/src/traffic_analysis/d04_modelling/test_detection.py index f12991d..8dda660 100644 --- a/src/traffic_analysis/d04_modelling/test_detection.py +++ b/src/traffic_analysis/d04_modelling/test_detection.py @@ -36,5 +36,5 @@ def test_detection(image_path): delt_time = end_time - start_time -# test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') -test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') +test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') +# test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') From 5456c9b349c7c74c49d8b3fbeb8069d3232cdbb9 Mon Sep 17 00:00:00 2001 From: Jack Hensley Date: Wed, 7 Aug 2019 
11:20:07 +0000 Subject: [PATCH 08/65] edited params and run_pipeline so they can run and be tested --- conf/base/parameters.yml | 2 +- src/run_pipeline.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conf/base/parameters.yml b/conf/base/parameters.yml index eaa9f84..58bd467 100644 --- a/conf/base/parameters.yml +++ b/conf/base/parameters.yml @@ -37,7 +37,7 @@ data_renaming: modelling: # obj detection - detection_model: "yolov3" + detection_model: "yolov3-tiny" detection_implementation: "cvlib" detection_iou_threshold: 0.05 detection_confidence_threshold: 0.2 diff --git a/src/run_pipeline.py b/src/run_pipeline.py index 19447ab..98633ae 100644 --- a/src/run_pipeline.py +++ b/src/run_pipeline.py @@ -16,8 +16,8 @@ retrieve_and_upload_video_names_to_s3(ouput_file_name='Date_20190717_Cameras_03604_02262', paths=paths, - from_date='2019-07-17', to_date='2019-07-17', - from_time='13-00-00', to_time='14-00-00', + from_date='2019-07-15', to_date='2019-07-15', +# from_time='13-00-00', to_time='14-00-00', s3_credentials=s3_credentials, camera_list=['00001.03604', '00001.02262']) """ From acb2bc3599d492726cecaab5b403601dd10e294a Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Wed, 7 Aug 2019 23:22:16 +0100 Subject: [PATCH 09/65] Fixing issues with loading videos for the cvat dataset --- .../d04_modelling/transfer/data_loader.py | 102 ++++++++++++------ 1 file changed, 68 insertions(+), 34 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py index ea04708..b5969b0 100644 --- a/src/traffic_analysis/d04_modelling/transfer/data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -1,11 +1,10 @@ import xml.etree.ElementTree as ET -from random import shuffle from PIL import Image import numpy as np from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import 
DataLoaderS3 -from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir +from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir, mp4_to_npy from enum import Enum @@ -33,36 +32,32 @@ def get_train_and_test(self, train_fraction): delete_and_recreate_dir(self.paths['temp_annotation']) delete_and_recreate_dir(self.paths['temp_raw_images']) + delete_and_recreate_dir(self.paths['temp_raw_video']) x, y = self.load_data_from_s3() delete_and_recreate_dir(self.paths['temp_annotation']) delete_and_recreate_dir(self.paths['temp_raw_images']) + delete_and_recreate_dir(self.paths['temp_raw_video']) - """ - results = shuffle(results) - train = results[:int(train_fraction * len(results))] - test = results[int(train_fraction * len(results)):] - """ + x_train = x[:int(len(x) * train_fraction)] + y_train = y[:int(len(x) * train_fraction)] - return #train_x, train_y, test_x, test_y + x_test = x[int(len(x) * train_fraction):] + y_test = y[int(len(x) * train_fraction):] + + return x_train, y_train, x_test, y_test def load_data_from_s3(self): xs = [] ys = [] - for dataset in self.datasets: x, y = self.load_mapping[dataset]() + assert len(x) == len(y), "Mismatch in number of input and output pairs! 
" \ + "(Dataset: " + dataset.name + ")" xs += x ys += y - return x, y - - def load_inputs_from_s3(self, test_y, train_y): - - test_x = [] - train_x = [] - - return test_x, train_x + return xs, ys def load_detrac_data(self): @@ -77,7 +72,6 @@ def load_detrac_data(self): print('Loading detrac images...') x = [] for labels in y: - print(labels) image_num = labels.split(' ')[0].zfill(5) folder = labels.split(' ')[1] file_to_download = paths['s3_detrac_images'] + \ @@ -168,27 +162,67 @@ def load_cvat_data(self): y += result print('Loading cvat videos...') - x = [] + # Build a list of the videos needed + video_set = set() for labels in y: - print(labels) - image_num = labels.split(' ')[0].zfill(5) - folder = labels.split(' ')[1] - file_to_download = paths['s3_detrac_images'] + \ - folder + '/' + \ - 'img' + image_num + '.jpg' - download_file_to = paths['temp_raw_images'] + \ - folder + '_' + \ - image_num + '.jpg' + video_set.add(labels.split(' ')[1]) + x = [] + + for id in video_set: + video = self.get_cvat_video(id) + + if(video is not None): + for labels in y: + if(labels.split(' ')[1] == id): + image_num = labels.split(' ')[0] + x.append(video[int(image_num), :, :, :]) + + return x, y + + def get_cvat_video(self, id): + vals = id.split('_') + if (len(vals) >= 4): + date = vals[1] + file_names = [id.split('_')[1:][0].replace('-', '') + '-' + + id.split('_')[1:][1].replace('-', '')[:6] + '_' + + id.split('_')[1:][2], + id.split('_')[1:][0] + ' ' + + id.split('_')[1:][1].replace('-', ':') + '_' + + id.split('_')[1:][2]] + else: + date = vals[0] + file_names = [id.split('_')[0].replace('-', '') + '-' + + id.split('_')[1].replace('-', '')[:6] + '_' + + id.split('_')[2], + id.split('_')[0] + ' ' + + id.split('_')[1].replace('-', ':') + '_' + + id.split('_')[2]] + file_to_download = paths['s3_video'] + \ + date + '/' + \ + file_names[0] + '.mp4' + download_file_to = paths['temp_raw_video'] + \ + file_names[0] + '.mp4' + try: self.data_loader_s3.download_file( 
path_of_file_to_download=file_to_download, path_to_download_file_to=download_file_to) + return mp4_to_npy(download_file_to) - img = Image.open(download_file_to) - img.load() - x.append(np.asarray(img, dtype="int32")) + except: + try: + file_to_download = paths['s3_video'] + \ + date + '/' + \ + file_names[1] + '.mp4' - return x, y + self.data_loader_s3.download_file( + path_of_file_to_download=file_to_download, + path_to_download_file_to=download_file_to) + return mp4_to_npy(download_file_to) + + except: + print('Could not download file: ' + id) + return def parse_cvat_xml_file(self, xml_file): @@ -245,7 +279,7 @@ def parse_cvat_xml_file(self, xml_file): paths = load_paths() creds = load_credentials() -dl = DataLoader(datasets=[TransferDataset.detrac, TransferDataset.cvat], creds=creds, paths=paths) -dl.get_train_and_test(.8) +dl = DataLoader(datasets=[TransferDataset.cvat], creds=creds, paths=paths) +x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) #TODO how should I actually interpret the dimensions? 
From c0601a3980c72a35f2d6cede96f73a1dcd6880ee Mon Sep 17 00:00:00 2001 From: Sam Blakeman Date: Thu, 8 Aug 2019 12:19:08 +0100 Subject: [PATCH 10/65] Working parsing of input and output data for transfer --- src/traffic_analysis/d02_ref/ref_utils.py | 43 ++++++++++ .../d02_ref/upload_annotation_names_to_s3.py | 17 ++-- .../d04_modelling/transfer/data_loader.py | 82 ++++++------------- 3 files changed, 74 insertions(+), 68 deletions(-) diff --git a/src/traffic_analysis/d02_ref/ref_utils.py b/src/traffic_analysis/d02_ref/ref_utils.py index 5f7a83a..0f50ec4 100644 --- a/src/traffic_analysis/d02_ref/ref_utils.py +++ b/src/traffic_analysis/d02_ref/ref_utils.py @@ -5,6 +5,8 @@ import time as Time from subprocess import Popen, PIPE +from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 + def upload_json_to_s3(paths, save_name, selected_files): """ save json file to s3 @@ -71,3 +73,44 @@ def get_names_of_folder_content_from_s3(bucket_name, prefix, s3_profile): return elapsed_time, files +def get_s3_video_path_from_xml_name(xml_file_name, s3_creds, paths): + + # Supports old and new naming conventions + vals = xml_file_name.split('_') + data_loader_s3 = DataLoaderS3(s3_credentials=s3_creds, + bucket_name=paths['bucket_name']) + + if (len(vals) >= 4): + date = vals[1] + file_names = [xml_file_name.split('_')[1:][0].replace('-', '') + '-' + + xml_file_name.split('_')[1:][1].replace('-', '')[:6] + '_' + + xml_file_name.split('_')[1:][2], + xml_file_name.split('_')[1:][0] + ' ' + + xml_file_name.split('_')[1:][1].replace('-', ':') + '_' + + xml_file_name.split('_')[1:][2]] + else: + date = vals[0] + file_names = [xml_file_name.split('_')[0].replace('-', '') + '-' + + xml_file_name.split('_')[1].replace('-', '')[:6] + '_' + + xml_file_name.split('_')[2], + xml_file_name.split('_')[0] + ' ' + + xml_file_name.split('_')[1].replace('-', ':') + '_' + + xml_file_name.split('_')[2]] + file_to_download = paths['s3_video'] + \ + date + '/' + \ + file_names[0] + '.mp4' + + 
if(data_loader_s3.file_exists(file_to_download)): + return file_to_download + + else: + file_to_download = paths['s3_video'] + \ + date + '/' + \ + file_names[1] + '.mp4' + + if (data_loader_s3.file_exists(file_to_download)): + return file_to_download + else: + print('Could not download file: ' + xml_file_name) + return + diff --git a/src/traffic_analysis/d02_ref/upload_annotation_names_to_s3.py b/src/traffic_analysis/d02_ref/upload_annotation_names_to_s3.py index ee99646..f01c662 100644 --- a/src/traffic_analysis/d02_ref/upload_annotation_names_to_s3.py +++ b/src/traffic_analysis/d02_ref/upload_annotation_names_to_s3.py @@ -1,5 +1,6 @@ from traffic_analysis.d02_ref.ref_utils import get_names_of_folder_content_from_s3 from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 +from traffic_analysis.d02_ref.ref_utils import get_s3_video_path_from_xml_name def upload_annotation_names_to_s3(paths, @@ -11,7 +12,7 @@ def upload_annotation_names_to_s3(paths, Returns: - """ + """ bucket_name = paths['bucket_name'] s3_profile = paths['s3_profile'] @@ -24,15 +25,11 @@ def upload_annotation_names_to_s3(paths, selected_files = [] for file in files: - if file: - vals = file.split('_') - if len(vals) == 4: - vals = vals[1:] - date = vals[0] - time = vals[1].replace('-', ':') - name = date + ' ' + time + '_' + vals[2] - name = name.replace('.xml', '.mp4') - selected_files.append(paths['s3_video'] + date + '/' + name) + video_file = get_s3_video_path_from_xml_name(xml_file_name=file, + s3_creds=s3_credentials, + paths=paths) + if(video_file): + selected_files.append(video_file) dl = DataLoaderS3(s3_credentials, bucket_name=paths['bucket_name']) diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer/data_loader.py index b5969b0..303a2e8 100644 --- a/src/traffic_analysis/d04_modelling/transfer/data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer/data_loader.py @@ -5,6 +5,7 @@ from 
traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir, mp4_to_npy +from traffic_analysis.d02_ref.ref_utils import get_s3_video_path_from_xml_name from enum import Enum @@ -30,13 +31,7 @@ def __init__(self, datasets, creds, paths): def get_train_and_test(self, train_fraction): - delete_and_recreate_dir(self.paths['temp_annotation']) - delete_and_recreate_dir(self.paths['temp_raw_images']) - delete_and_recreate_dir(self.paths['temp_raw_video']) x, y = self.load_data_from_s3() - delete_and_recreate_dir(self.paths['temp_annotation']) - delete_and_recreate_dir(self.paths['temp_raw_images']) - delete_and_recreate_dir(self.paths['temp_raw_video']) x_train = x[:int(len(x) * train_fraction)] y_train = y[:int(len(x) * train_fraction)] @@ -48,6 +43,8 @@ def get_train_and_test(self, train_fraction): def load_data_from_s3(self): + self.clear_temp_folders() + xs = [] ys = [] for dataset in self.datasets: @@ -57,8 +54,15 @@ def load_data_from_s3(self): xs += x ys += y + self.clear_temp_folders() + return xs, ys + def clear_temp_folders(self): + delete_and_recreate_dir(self.paths['temp_annotation']) + delete_and_recreate_dir(self.paths['temp_raw_images']) + delete_and_recreate_dir(self.paths['temp_raw_video']) + def load_detrac_data(self): print('Parsing detrac xmls...') @@ -134,9 +138,9 @@ def parse_detrac_xml_file(self, xml_file): height = float(frame_obj.find('box').attrib['height']) x_min = left - y_min = top - height + y_min = top x_max = left + width - y_max = top + y_max = top + height result += ' ' + str(vehicle_type) + \ ' ' + str(x_min) + \ @@ -180,49 +184,15 @@ def load_cvat_data(self): return x, y - def get_cvat_video(self, id): - vals = id.split('_') - if (len(vals) >= 4): - date = vals[1] - file_names = [id.split('_')[1:][0].replace('-', '') + '-' + - id.split('_')[1:][1].replace('-', '')[:6] + '_' + - 
id.split('_')[1:][2], - id.split('_')[1:][0] + ' ' + - id.split('_')[1:][1].replace('-', ':') + '_' + - id.split('_')[1:][2]] - else: - date = vals[0] - file_names = [id.split('_')[0].replace('-', '') + '-' + - id.split('_')[1].replace('-', '')[:6] + '_' + - id.split('_')[2], - id.split('_')[0] + ' ' + - id.split('_')[1].replace('-', ':') + '_' + - id.split('_')[2]] - file_to_download = paths['s3_video'] + \ - date + '/' + \ - file_names[0] + '.mp4' - download_file_to = paths['temp_raw_video'] + \ - file_names[0] + '.mp4' - try: - self.data_loader_s3.download_file( - path_of_file_to_download=file_to_download, - path_to_download_file_to=download_file_to) - return mp4_to_npy(download_file_to) - - except: - try: - file_to_download = paths['s3_video'] + \ - date + '/' + \ - file_names[1] + '.mp4' - - self.data_loader_s3.download_file( - path_of_file_to_download=file_to_download, - path_to_download_file_to=download_file_to) - return mp4_to_npy(download_file_to) + def get_cvat_video(self, xml_file_name): - except: - print('Could not download file: ' + id) - return + video_path = get_s3_video_path_from_xml_name(xml_file_name=xml_file_name, s3_creds=self.creds[paths['s3_creds']], paths=self.paths) + if(video_path): + download_file_to = paths['temp_raw_video'] + 'test' + '.mp4' + self.data_loader_s3.download_file(path_of_file_to_download=video_path, path_to_download_file_to=download_file_to) + return mp4_to_npy(download_file_to) + else: + return def parse_cvat_xml_file(self, xml_file): @@ -235,9 +205,6 @@ def parse_cvat_xml_file(self, xml_file): print("Could not download file " + xml_file) root = ET.parse(path).getroot() - - results = [] - im_path = path.split('/')[-1][:-4] im_width = 250 im_height = 250 @@ -257,9 +224,9 @@ def parse_cvat_xml_file(self, xml_file): vehicle_type = frame.findall('attribute')[2].text x_min = float(frame.attrib['xtl']) - y_min = float(frame.attrib['ybr']) + y_min = float(frame.attrib['ytl']) x_max = float(frame.attrib['xbr']) - y_max = 
float(frame.attrib['ytl']) + y_max = float(frame.attrib['ybr']) frame_dict[frame_num] += ' ' + str(vehicle_type) + \ ' ' + str(x_min) + \ @@ -279,7 +246,6 @@ def parse_cvat_xml_file(self, xml_file): paths = load_paths() creds = load_credentials() -dl = DataLoader(datasets=[TransferDataset.cvat], creds=creds, paths=paths) +dl = DataLoader(datasets=[TransferDataset.cvat, TransferDataset.detrac], creds=creds, paths=paths) x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) - -#TODO how should I actually interpret the dimensions? +print('Done') From 6c0e245764439c91a6cf08ffc8a1328cbc4624a0 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Fri, 9 Aug 2019 17:53:12 +0100 Subject: [PATCH 11/65] refactoring --- .../d04_modelling/{transfer => transfer_learning}/data_loader.py | 0 .../d04_modelling/{transfer => transfer_learning}/example.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename src/traffic_analysis/d04_modelling/{transfer => transfer_learning}/data_loader.py (100%) rename src/traffic_analysis/d04_modelling/{transfer => transfer_learning}/example.py (100%) diff --git a/src/traffic_analysis/d04_modelling/transfer/data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/data_loader.py similarity index 100% rename from src/traffic_analysis/d04_modelling/transfer/data_loader.py rename to src/traffic_analysis/d04_modelling/transfer_learning/data_loader.py diff --git a/src/traffic_analysis/d04_modelling/transfer/example.py b/src/traffic_analysis/d04_modelling/transfer_learning/example.py similarity index 100% rename from src/traffic_analysis/d04_modelling/transfer/example.py rename to src/traffic_analysis/d04_modelling/transfer_learning/example.py From fbbe0662705b5086181dccebc5650a2e199277e8 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:34:18 +0100 Subject: [PATCH 12/65] moved functions and other files from other github repo over --- conf/base/training_parameters.yml | 0 
src/traffic_analysis/d04_modelling/transfer_learning/args.py | 0 .../{generate_tensorflow_model.py => tensorflow_model_loader.py} | 0 .../d04_modelling/transfer_learning/tensorflow_training_utils.py | 0 .../d04_modelling/transfer_learning/train_tensorflow_model.py | 0 .../transfer_learning/{data_loader.py => training_data_loader.py} | 0 6 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 conf/base/training_parameters.yml create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/args.py rename src/traffic_analysis/d04_modelling/transfer_learning/{generate_tensorflow_model.py => tensorflow_model_loader.py} (100%) create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py rename src/traffic_analysis/d04_modelling/transfer_learning/{data_loader.py => training_data_loader.py} (100%) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/args.py b/src/traffic_analysis/d04_modelling/transfer_learning/args.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/generate_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py similarity index 100% rename from src/traffic_analysis/d04_modelling/transfer_learning/generate_tensorflow_model.py rename to src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py 
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py similarity index 100% rename from src/traffic_analysis/d04_modelling/transfer_learning/data_loader.py rename to src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py From cc2fa82166e5ef79717efe3f40a294bd4ade2c34 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:35:01 +0100 Subject: [PATCH 13/65] converted args.py to a .yml file --- conf/base/training_parameters.yml | 34 +++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index e69de29..66f7899 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -0,0 +1,34 @@ +training: + batch_size : 6 + img_size : [416, 416] # Images will be resized to `img_size` and fed to the network, size format: [width, height] + letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. + total_epoches : 100 + train_evaluation_step : 100 # Evaluate on the training batch after some steps. + val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. + save_epoch : 10 # Save the model after some epochs. + batch_norm_decay : 0.99 # decay in bn ops + weight_decay : 5e-4 # l2 weight decay + global_step : 0 # used when resuming training + warm_up_epoch : 3 # set to larger value if gradient explodes + num_threads : 10 # Number of threads for image processing used in tf.data pipeline. + prefetech_buffer : 5 # Prefetech_buffer used in tf.data pipeline. 
+ +learning: + optimizer_name : 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] + save_optimizer : True # Whether to save the optimizer parameters into the checkpoint file. + learning_rate_init : 1e-4 + lr_type : 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] + lr_decay_epoch : 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. + lr_decay_factor : 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. + lr_lower_bound : 1e-6 # The minimum learning rate. + pw_boundaries : [30, 50] # epoch based boundaries + pw_values : [learning_rate_init, 3e-5, 1e-5] + +validation: + # nms + nms_threshold : 0.45 # iou threshold in nms operation + score_threshold : 0.01 # threshold of the probability of the classes in nms operation, i.e. score = pred_confs * pred_probs. set lower for higher recall. + nms_topk : 150 # keep at most nms_topk outputs after nms + + # mAP eval + eval_threshold : 0.5 # the iou threshold applied in mAP evaluation \ No newline at end of file From e30fd46b126cb905ad620a27a603f79ea14f136a Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:36:23 +0100 Subject: [PATCH 14/65] refactoring changes --- .../d04_modelling/perform_detection_tensorflow.py | 2 +- .../transfer_learning/convert_darknet_to_tensorflow.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py index 20478b7..43e6d05 100644 --- a/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py +++ b/src/traffic_analysis/d04_modelling/perform_detection_tensorflow.py @@ -9,7 +9,7 @@ remove_overlapping_boxes, letterbox_resize from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors, \ yolov3_darknet_to_tensorflow -from 
traffic_analysis.d04_modelling.transfer_learning.generate_tensorflow_model import YoloV3 +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d04_modelling.perform_detection_opencv import label_detections, \ choose_objects_of_selected_labels diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/convert_darknet_to_tensorflow.py b/src/traffic_analysis/d04_modelling/transfer_learning/convert_darknet_to_tensorflow.py index d9f7da5..d3bf77d 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/convert_darknet_to_tensorflow.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/convert_darknet_to_tensorflow.py @@ -7,7 +7,7 @@ import tensorflow as tf import numpy as np -from traffic_analysis.d04_modelling.transfer_learning.generate_tensorflow_model import YoloV3 +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d02_ref.download_detection_model_from_s3 import download_detection_model_from_s3 From fbff3600bbd3f1ad016bc2acca48f025d38d5417 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:36:58 +0100 Subject: [PATCH 15/65] movement of files over from other github repo --- .../tensorflow_training_utils.py | 1192 +++++++++++++++++ .../train_tensorflow_model.py | 260 ++++ 2 files changed, 1452 insertions(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index e69de29..f3ac843 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -0,0 +1,1192 @@ +from __future__ import division, print_function + +import numpy as np +import sys +import cv2 +import random +import tensorflow as tf +from tensorflow.core.framework import summary_pb2 +from 
collections import Counter + +PY_VERSION = sys.version_info[0] +iter_cnt = 0 + + +def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=False, mix_up=False, letterbox_resize=True, interval=10): + ''' + generate a batch of imgs and labels + param: + batch_line: a batch of lines from train/val.txt files + class_num: num of total classes. + img_size: the image size to be resized to. format: [width, height]. + anchors: anchors. shape: [9, 2]. + mode: 'train' or 'val'. if set to 'train', data augmentation will be applied. + multi_scale: whether to use multi_scale training, img_size varies from [320, 320] to [640, 640] by default. Note that it will take effect only when mode is set to 'train'. + letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. + interval: change the scale of image every interval batches. Note that it's indeterministic because of the multi threading. + ''' + global iter_cnt + # multi_scale training + if multi_scale and mode == 'train': + random.seed(iter_cnt // interval) + random_img_size = [[x * 32, x * 32] for x in range(10, 20)] + img_size = random.sample(random_img_size, 1)[0] + iter_cnt += 1 + + img_idx_batch, img_batch, y_true_13_batch, y_true_26_batch, y_true_52_batch = [], [], [], [], [] + + # mix up strategy + if mix_up and mode == 'train': + mix_lines = [] + batch_line = batch_line.tolist() + for idx, line in enumerate(batch_line): + if np.random.uniform(0, 1) < 0.5: + mix_lines.append([line, random.sample(batch_line[:idx] + batch_line[idx+1:], 1)[0]]) + else: + mix_lines.append(line) + batch_line = mix_lines + + for line in batch_line: + img_idx, img, y_true_13, y_true_26, y_true_52 = parse_data(line, class_num, img_size, anchors, mode, letterbox_resize) + + img_idx_batch.append(img_idx) + img_batch.append(img) + y_true_13_batch.append(y_true_13) + y_true_26_batch.append(y_true_26) + y_true_52_batch.append(y_true_52) + + img_idx_batch, img_batch, 
y_true_13_batch, y_true_26_batch, y_true_52_batch = np.asarray(img_idx_batch, np.int64), np.asarray(img_batch), np.asarray(y_true_13_batch), np.asarray(y_true_26_batch), np.asarray(y_true_52_batch) + + return img_idx_batch, img_batch, y_true_13_batch, y_true_26_batch, y_true_52_batch + + +def parse_data(line, class_num, img_size, anchors, mode, letterbox_resize): + ''' + param: + line: a line from the training/test txt file + class_num: totol class nums. + img_size: the size of image to be resized to. [width, height] format. + anchors: anchors. + mode: 'train' or 'val'. When set to 'train', data_augmentation will be applied. + letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. + ''' + if not isinstance(line, list): + img_idx, pic_path, boxes, labels, _, _ = parse_line(line) + img = cv2.imread(pic_path) + # expand the 2nd dimension, mix up weight default to 1. + boxes = np.concatenate((boxes, np.full(shape=(boxes.shape[0], 1), fill_value=1., dtype=np.float32)), axis=-1) + else: + # the mix up case + _, pic_path1, boxes1, labels1, _, _ = parse_line(line[0]) + img1 = cv2.imread(pic_path1) + img_idx, pic_path2, boxes2, labels2, _, _ = parse_line(line[1]) + img2 = cv2.imread(pic_path2) + + img, boxes = mix_up(img1, img2, boxes1, boxes2) + labels = np.concatenate((labels1, labels2)) + + if mode == 'train': + # random color jittering + # NOTE: applying color distort may lead to bad performance sometimes + img = random_color_distort(img) + + # random expansion with prob 0.5 + if np.random.uniform(0, 1) > 0.5: + img, boxes = random_expand(img, boxes, 4) + + # random cropping + h, w, _ = img.shape + boxes, crop = random_crop_with_constraints(boxes, (w, h)) + x0, y0, w, h = crop + img = img[y0: y0+h, x0: x0+w] + + # resize with random interpolation + h, w, _ = img.shape + interp = np.random.randint(0, 5) + img, boxes = resize_with_bbox(img, boxes, img_size[0], img_size[1], interp=interp, 
letterbox=letterbox_resize) + + # random horizontal flip + h, w, _ = img.shape + img, boxes = random_flip(img, boxes, px=0.5) + else: + img, boxes = resize_with_bbox(img, boxes, img_size[0], img_size[1], interp=1, letterbox=letterbox_resize) + + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB).astype(np.float32) + + # the input of yolo_v3 should be in range 0~1 + img = img / 255. + + y_true_13, y_true_26, y_true_52 = process_box(boxes, labels, img_size, class_num, anchors) + + return img_idx, img, y_true_13, y_true_26, y_true_52 + + +def process_box(boxes, labels, img_size, class_num, anchors): + ''' + Generate the y_true label, i.e. the ground truth feature_maps in 3 different scales. + params: + boxes: [N, 5] shape, float32 dtype. `x_min, y_min, x_max, y_mix, mixup_weight`. + labels: [N] shape, int64 dtype. + class_num: int64 num. + anchors: [9, 4] shape, float32 dtype. + ''' + anchors_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]] + + # convert boxes form: + # shape: [N, 2] + # (x_center, y_center) + box_centers = (boxes[:, 0:2] + boxes[:, 2:4]) / 2 + # (width, height) + box_sizes = boxes[:, 2:4] - boxes[:, 0:2] + + # [13, 13, 3, 5+num_class+1] `5` means coords and labels. `1` means mix up weight. + y_true_13 = np.zeros((img_size[1] // 32, img_size[0] // 32, 3, 6 + class_num), np.float32) + y_true_26 = np.zeros((img_size[1] // 16, img_size[0] // 16, 3, 6 + class_num), np.float32) + y_true_52 = np.zeros((img_size[1] // 8, img_size[0] // 8, 3, 6 + class_num), np.float32) + + # mix up weight default to 1. + y_true_13[..., -1] = 1. + y_true_26[..., -1] = 1. + y_true_52[..., -1] = 1. 
def parse_line(line):
    '''
    Given a line from the training/test txt file, return parsed info.
    line format: line_index, img_path, img_width, img_height, [box_info_1 (5 number)], ...
    return:
        line_idx: int
        pic_path: string.
        boxes: shape [N, 4], N is the ground truth count, elements in the second
            dimension are [x_min, y_min, x_max, y_max]
        labels: shape [N]. class index.
        img_width: int.
        img_height: int
    '''
    # tf.data hands lines in as bytes; plain python callers pass str.
    # (replaces the fragile `'str' not in str(type(line))` check)
    if not isinstance(line, str):
        line = line.decode()
    s = line.strip().split(' ')
    assert len(s) > 8, 'Annotation error! Please check your annotation file. Make sure there is at least one target object in each image.'
    line_idx = int(s[0])
    pic_path = s[1]
    img_width = int(s[2])
    img_height = int(s[3])
    s = s[4:]
    assert len(s) % 5 == 0, 'Annotation error! Please check your annotation file. Maybe partially missing some coordinates?'
    box_cnt = len(s) // 5
    boxes = []
    labels = []
    for i in range(box_cnt):
        # each box is 5 numbers: label, x_min, y_min, x_max, y_max
        chunk = s[i * 5: i * 5 + 5]
        labels.append(int(chunk[0]))
        boxes.append([float(v) for v in chunk[1:]])
    boxes = np.asarray(boxes, np.float32)
    labels = np.asarray(labels, np.int64)
    return line_idx, pic_path, boxes, labels, img_width, img_height


def mix_up(img1, img2, bbox1, bbox2):
    '''
    Blend two images into one (mix-up augmentation).
    return:
        mix_img: HWC format mix up image (uint8, sized to the larger of the two inputs)
        mix_bbox: [N, 5] shape mix up bbox, i.e. `x_min, y_min, x_max, y_max, mixup_weight`.
    '''
    height = max(img1.shape[0], img2.shape[0])
    width = max(img1.shape[1], img2.shape[1])

    mix_img = np.zeros(shape=(height, width, 3), dtype='float32')

    # sample the blend weight from Beta(1.5, 1.5), clamped to [0, 1]
    # rand_num = np.random.random()
    rand_num = np.random.beta(1.5, 1.5)
    rand_num = max(0, min(1, rand_num))
    mix_img[:img1.shape[0], :img1.shape[1], :] = img1.astype('float32') * rand_num
    mix_img[:img2.shape[0], :img2.shape[1], :] += img2.astype('float32') * (1. - rand_num)

    mix_img = mix_img.astype('uint8')

    # the last element of the 2nd dimension is the mix up weight
    bbox1 = np.concatenate((bbox1, np.full(shape=(bbox1.shape[0], 1), fill_value=rand_num)), axis=-1)
    bbox2 = np.concatenate((bbox2, np.full(shape=(bbox2.shape[0], 1), fill_value=1. - rand_num)), axis=-1)
    mix_bbox = np.concatenate((bbox1, bbox2), axis=0)

    return mix_img, mix_bbox


def bbox_crop(bbox, crop_box=None, allow_outside_center=True):
    """Crop bounding boxes according to slice area.
    This method is mainly used with image cropping to ensure bounding boxes fit
    within the cropped image.
    Parameters
    ----------
    bbox : numpy.ndarray
        Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes.
        The second axis represents attributes of the bounding box.
        Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`,
        we allow additional attributes other than coordinates, which stay intact
        during bounding box transformations.
    crop_box : tuple
        Tuple of length 4. :math:`(x_{min}, y_{min}, width, height)`
    allow_outside_center : bool
        If `False`, remove bounding boxes which have centers outside cropping area.
    Returns
    -------
    numpy.ndarray
        Cropped bounding boxes with shape (M, 4+) where M <= N.
    """
    bbox = bbox.copy()
    if crop_box is None:
        return bbox
    if not len(crop_box) == 4:
        raise ValueError(
            "Invalid crop_box parameter, requires length 4, given {}".format(str(crop_box)))
    if sum([int(c is None) for c in crop_box]) == 4:
        return bbox

    l, t, w, h = crop_box

    # None (or 0) components fall back to an unbounded side
    left = l if l else 0
    top = t if t else 0
    right = left + (w if w else np.inf)
    bottom = top + (h if h else np.inf)
    crop_bbox = np.array((left, top, right, bottom))

    if allow_outside_center:
        mask = np.ones(bbox.shape[0], dtype=bool)
    else:
        centers = (bbox[:, :2] + bbox[:, 2:4]) / 2
        mask = np.logical_and(crop_bbox[:2] <= centers, centers < crop_bbox[2:]).all(axis=1)

    # clip box corners to the crop window, then shift into crop coordinates
    bbox[:, :2] = np.maximum(bbox[:, :2], crop_bbox[:2])
    bbox[:, 2:4] = np.minimum(bbox[:, 2:4], crop_bbox[2:4])
    bbox[:, :2] -= crop_bbox[:2]
    bbox[:, 2:4] -= crop_bbox[:2]

    # drop boxes that collapsed to zero/negative size
    mask = np.logical_and(mask, (bbox[:, :2] < bbox[:, 2:4]).all(axis=1))
    bbox = bbox[mask]
    return bbox
def bbox_iou(bbox_a, bbox_b, offset=0):
    """Calculate Intersection-Over-Union(IOU) of two bounding boxes.
    Parameters
    ----------
    bbox_a : numpy.ndarray
        An ndarray with shape :math:`(N, 4)`.
    bbox_b : numpy.ndarray
        An ndarray with shape :math:`(M, 4)`.
    offset : float or int, default is 0
        The ``offset`` is used to control whether the width(or height) is computed as
        (right - left + ``offset``).
        Note that the offset must be 0 for normalized bboxes, whose ranges are in ``[0, 1]``.
    Returns
    -------
    numpy.ndarray
        An ndarray with shape :math:`(N, M)` indicates IOU between each pair of
        bounding boxes in `bbox_a` and `bbox_b`.
    """
    if bbox_a.shape[1] < 4 or bbox_b.shape[1] < 4:
        raise IndexError("Bounding boxes axis 1 must have at least length 4")

    # broadcast to all (N, M) pairs
    tl = np.maximum(bbox_a[:, None, :2], bbox_b[:, :2])
    br = np.minimum(bbox_a[:, None, 2:4], bbox_b[:, 2:4])

    # (tl < br).all(...) zeroes out non-overlapping pairs
    area_i = np.prod(br - tl + offset, axis=2) * (tl < br).all(axis=2)
    area_a = np.prod(bbox_a[:, 2:4] - bbox_a[:, :2] + offset, axis=1)
    area_b = np.prod(bbox_b[:, 2:4] - bbox_b[:, :2] + offset, axis=1)
    return area_i / (area_a[:, None] + area_b - area_i)


def random_crop_with_constraints(bbox, size, min_scale=0.3, max_scale=1,
                                 max_aspect_ratio=2, constraints=None,
                                 max_trial=50):
    """Crop an image randomly with bounding box constraints.
    This data augmentation is used in training of
    Single Shot Multibox Detector [#]_. More details can be found in
    data augmentation section of the original paper.
    .. [#] Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy,
       Scott Reed, Cheng-Yang Fu, Alexander C. Berg.
       SSD: Single Shot MultiBox Detector. ECCV 2016.
    Parameters
    ----------
    bbox : numpy.ndarray
        Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes.
        The second axis represents attributes of the bounding box.
        Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`,
        we allow additional attributes other than coordinates, which stay intact
        during bounding box transformations.
    size : tuple
        Tuple of length 2 of image shape as (width, height).
    min_scale : float
        The minimum ratio between a cropped region and the original image.
        The default value is :obj:`0.3`.
    max_scale : float
        The maximum ratio between a cropped region and the original image.
        The default value is :obj:`1`.
    max_aspect_ratio : float
        The maximum aspect ratio of cropped region.
        The default value is :obj:`2`.
    constraints : iterable of tuples
        An iterable of constraints.
        Each constraint should be :obj:`(min_iou, max_iou)` format.
        It means no constraint if set :obj:`min_iou` or :obj:`max_iou` to :obj:`None`.
        If this argument defaults to :obj:`None`, :obj:`((0.1, None), (0.3, None),
        (0.5, None), (0.7, None), (0.9, None), (None, 1))` will be used.
    max_trial : int
        Maximum number of trials for each constraint before exit no matter what.
    Returns
    -------
    numpy.ndarray
        Cropped bounding boxes with shape :obj:`(M, 4+)` where M <= N.
    tuple
        Tuple of length 4 as (x_offset, y_offset, new_width, new_height).
    """
    # default params in paper
    if constraints is None:
        constraints = (
            (0.1, None),
            (0.3, None),
            (0.5, None),
            (0.7, None),
            (0.9, None),
            (None, 1),
        )

    w, h = size

    candidates = [(0, 0, w, h)]
    for min_iou, max_iou in constraints:
        min_iou = -np.inf if min_iou is None else min_iou
        max_iou = np.inf if max_iou is None else max_iou

        for _ in range(max_trial):
            scale = random.uniform(min_scale, max_scale)
            aspect_ratio = random.uniform(
                max(1 / max_aspect_ratio, scale * scale),
                min(max_aspect_ratio, 1 / (scale * scale)))
            crop_h = int(h * scale / np.sqrt(aspect_ratio))
            crop_w = int(w * scale * np.sqrt(aspect_ratio))

            # Bug fix: when the sampled crop equals the image size (reachable,
            # e.g. scale=s with aspect_ratio=s*s gives crop_h == h),
            # random.randrange(0) raises ValueError. Use an inclusive upper
            # bound so a zero offset is a valid draw.
            crop_t = random.randrange(h - crop_h + 1)
            crop_l = random.randrange(w - crop_w + 1)
            crop_bb = np.array((crop_l, crop_t, crop_l + crop_w, crop_t + crop_h))

            if len(bbox) == 0:
                top, bottom = crop_t, crop_t + crop_h
                left, right = crop_l, crop_l + crop_w
                return bbox, (left, top, right - left, bottom - top)

            iou = bbox_iou(bbox, crop_bb[np.newaxis])
            if min_iou <= iou.min() and iou.max() <= max_iou:
                top, bottom = crop_t, crop_t + crop_h
                left, right = crop_l, crop_l + crop_w
                candidates.append((left, top, right - left, bottom - top))
                break

    # random select one candidate whose crop keeps at least one box
    while candidates:
        crop = candidates.pop(np.random.randint(0, len(candidates)))
        new_bbox = bbox_crop(bbox, crop, allow_outside_center=False)
        if new_bbox.size < 1:
            continue
        new_crop = (crop[0], crop[1], crop[2], crop[3])
        return new_bbox, new_crop
    # fall back to the full image when every candidate dropped all boxes
    return bbox, (0, 0, w, h)
def random_color_distort(img, brightness_delta=32, hue_vari=18, sat_vari=0.5, val_vari=0.5):
    '''
    Randomly jitter brightness, hue, saturation and value of an image.
    param:
        img: a BGR uint8 format OpenCV image. HWC format.
    returns a BGR uint8 image of the same shape.
    '''

    def _jitter_hue(hsv, vari, p=0.5):
        # each sub-op fires with probability (1 - p)
        if np.random.uniform(0, 1) > p:
            delta = np.random.randint(-vari, vari)
            hsv[:, :, 0] = (hsv[:, :, 0] + delta) % 180
        return hsv

    def _jitter_saturation(hsv, vari, p=0.5):
        if np.random.uniform(0, 1) > p:
            hsv[:, :, 1] *= 1 + np.random.uniform(-vari, vari)
        return hsv

    def _jitter_value(hsv, vari, p=0.5):
        if np.random.uniform(0, 1) > p:
            hsv[:, :, 2] *= 1 + np.random.uniform(-vari, vari)
        return hsv

    def _jitter_brightness(bgr, delta, p=0.5):
        if np.random.uniform(0, 1) > p:
            bgr = bgr.astype(np.float32) + int(np.random.uniform(-delta, delta))
        return np.clip(bgr, 0, 255)

    # brightness first, in BGR space
    img = _jitter_brightness(img, brightness_delta)
    img = img.astype(np.uint8)

    # remaining jitters happen in HSV space, in one of two random orders
    img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).astype(np.float32)
    if np.random.randint(0, 2):
        ops = ((_jitter_value, val_vari), (_jitter_saturation, sat_vari), (_jitter_hue, hue_vari))
    else:
        ops = ((_jitter_saturation, sat_vari), (_jitter_hue, hue_vari), (_jitter_value, val_vari))
    for op, vari in ops:
        img_hsv = op(img_hsv, vari)

    img_hsv = np.clip(img_hsv, 0, 255)
    return cv2.cvtColor(img_hsv.astype(np.uint8), cv2.COLOR_HSV2BGR)


def letterbox_resize(img, new_width, new_height, interp=0):
    '''
    Letterbox resize: keep the original aspect ratio, pad the remainder with
    grey (128). Returns (padded_image, resize_ratio, dw, dh) where (dw, dh) is
    the top-left offset of the scaled image inside the canvas.
    '''
    ori_height, ori_width = img.shape[:2]

    resize_ratio = min(new_width / ori_width, new_height / ori_height)
    fit_w = int(resize_ratio * ori_width)
    fit_h = int(resize_ratio * ori_height)

    scaled = cv2.resize(img, (fit_w, fit_h), interpolation=interp)

    canvas = np.full((new_height, new_width, 3), 128, np.uint8)
    dw = int((new_width - fit_w) / 2)
    dh = int((new_height - fit_h) / 2)
    canvas[dh: fit_h + dh, dw: fit_w + dw, :] = scaled

    return canvas, resize_ratio, dw, dh


def resize_with_bbox(img, bbox, new_width, new_height, interp=0, letterbox=False):
    '''
    Resize the image to (new_width, new_height) and rescale the bbox
    coordinates (in place on `bbox`) to match.
    '''
    if letterbox:
        padded, ratio, dw, dh = letterbox_resize(img, new_width, new_height, interp)
        bbox[:, [0, 2]] = bbox[:, [0, 2]] * ratio + dw  # xmin, xmax
        bbox[:, [1, 3]] = bbox[:, [1, 3]] * ratio + dh  # ymin, ymax
        return padded, bbox

    ori_height, ori_width = img.shape[:2]
    resized = cv2.resize(img, (new_width, new_height), interpolation=interp)
    bbox[:, [0, 2]] = bbox[:, [0, 2]] / ori_width * new_width    # xmin, xmax
    bbox[:, [1, 3]] = bbox[:, [1, 3]] / ori_height * new_height  # ymin, ymax
    return resized, bbox


def random_flip(img, bbox, px=0, py=0):
    '''
    Randomly mirror the image and correct the bbox accordingly.
    param:
        px: the probability of horizontal flip
        py: the probability of vertical flip
    '''
    height, width = img.shape[:2]

    if np.random.uniform(0, 1) < px:
        # horizontal mirror: x coordinates reflect about the image width
        img = cv2.flip(img, 1)
        bbox[:, 0], bbox[:, 2] = width - bbox[:, 2], width - bbox[:, 0]

    if np.random.uniform(0, 1) < py:
        # vertical mirror: y coordinates reflect about the image height
        img = cv2.flip(img, 0)
        bbox[:, 1], bbox[:, 3] = height - bbox[:, 3], height - bbox[:, 1]

    return img, bbox
def random_expand(img, bbox, max_ratio=4, fill=0, keep_ratio=True):
    '''
    Random expand original image with borders, this is identical to placing
    the original image on a larger canvas.
    param:
        max_ratio :
            Maximum ratio of the output image on both direction(vertical and horizontal)
        fill :
            The value(s) for padded borders.
        keep_ratio : bool
            If `True`, will keep output image the same aspect ratio as input.
    returns the expanded image and the bbox shifted into canvas coordinates.
    '''
    h, w, c = img.shape
    ratio_x = random.uniform(1, max_ratio)
    if keep_ratio:
        ratio_y = ratio_x
    else:
        ratio_y = random.uniform(1, max_ratio)

    # output canvas size and a random placement offset for the original image
    oh, ow = int(h * ratio_y), int(w * ratio_x)
    off_y = random.randint(0, oh - h)
    off_x = random.randint(0, ow - w)

    dst = np.full(shape=(oh, ow, c), fill_value=fill, dtype=img.dtype)

    dst[off_y:off_y + h, off_x:off_x + w, :] = img

    # correct bbox: shift both corners by the placement offset
    bbox[:, :2] += (off_x, off_y)
    bbox[:, 2:4] += (off_x, off_y)

    return dst, bbox


def make_summary(name, val):
    # Wrap a python scalar in a TF Summary proto so it can be written to
    # TensorBoard by a summary writer.
    return summary_pb2.Summary(value=[summary_pb2.Summary.Value(tag=name, simple_value=val)])


def config_learning_rate(args, global_step):
    # Build a learning-rate tensor according to args.lr_type.
    if args.lr_type == 'exponential':
        lr_tmp = tf.train.exponential_decay(args.learning_rate_init, global_step, args.lr_decay_freq,
                                            args.lr_decay_factor, staircase=True, name='exponential_learning_rate')
        # never decay below the configured floor
        return tf.maximum(lr_tmp, args.lr_lower_bound)
    elif args.lr_type == 'cosine_decay':
        # warm-up epochs (if enabled) are excluded from the cosine schedule span
        train_steps = (args.total_epoches - float(args.use_warm_up) * args.warm_up_epoch) * args.train_batch_num
        return args.lr_lower_bound + 0.5 * (args.learning_rate_init - args.lr_lower_bound) * \
            (1 + tf.cos(global_step / train_steps * np.pi))
    elif args.lr_type == 'cosine_decay_restart':
        return tf.train.cosine_decay_restarts(args.learning_rate_init, global_step,
                                              args.lr_decay_freq, t_mul=2.0, m_mul=1.0,
                                              name='cosine_decay_learning_rate_restart')
    elif args.lr_type == 'fixed':
        return tf.convert_to_tensor(args.learning_rate_init, name='fixed_learning_rate')
    elif args.lr_type == 'piecewise':
        return tf.train.piecewise_constant(global_step, boundaries=args.pw_boundaries, values=args.pw_values,
                                           name='piecewise_learning_rate')
    else:
        raise ValueError('Unsupported learning rate type!')


def config_optimizer(optimizer_name, learning_rate, decay=0.9, momentum=0.9):
    # Map an optimizer name to the corresponding tf.train optimizer instance.
    if optimizer_name == 'momentum':
        return tf.train.MomentumOptimizer(learning_rate, momentum=momentum)
    elif optimizer_name == 'rmsprop':
        return tf.train.RMSPropOptimizer(learning_rate, decay=decay, momentum=momentum)
    elif optimizer_name == 'adam':
        return tf.train.AdamOptimizer(learning_rate)
    elif optimizer_name == 'sgd':
        return tf.train.GradientDescentOptimizer(learning_rate)
    else:
        raise ValueError('Unsupported optimizer type!')


class AverageMeter(object):
    """Track the latest value and the running average of a scalar metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        # val: most recent value; sum/count back the running average
        self.val = 0
        self.average = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        # n is the number of samples `val` represents (e.g. batch size)
        self.val = val
        self.sum += val * n
        self.count += n
        self.average = self.sum / float(self.count)


def cpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, iou_thresh=0.5):
    """
    Perform NMS on CPU.
    Arguments:
        boxes: shape [1, 10647, 4]
        scores: shape [1, 10647, num_classes]
    Returns:
        (boxes, score, label) arrays concatenated over classes, or
        (None, None, None) when no box survives the score threshold.
    """
    boxes = boxes.reshape(-1, 4)
    scores = scores.reshape(-1, num_classes)
    # Picked bounding boxes
    picked_boxes, picked_score, picked_label = [], [], []

    # NMS is applied per class, independently
    for i in range(num_classes):
        indices = np.where(scores[:, i] >= score_thresh)
        filter_boxes = boxes[indices]
        filter_scores = scores[:, i][indices]
        if len(filter_boxes) == 0:
            continue
        # do non_max_suppression on the cpu
        indices = py_nms(filter_boxes, filter_scores,
                         max_boxes=max_boxes, iou_thresh=iou_thresh)
        picked_boxes.append(filter_boxes[indices])
        picked_score.append(filter_scores[indices])
        picked_label.append(np.ones(len(indices), dtype='int32') * i)
    if len(picked_boxes) == 0:
        return None, None, None

    boxes = np.concatenate(picked_boxes, axis=0)
    score = np.concatenate(picked_score, axis=0)
    label = np.concatenate(picked_label, axis=0)

    return boxes, score, label
def py_nms(boxes, scores, max_boxes=50, iou_thresh=0.5):
    """
    Pure Python greedy NMS baseline.

    Arguments:
        boxes: shape [-1, 4] (unknown box count)
        scores: shape [-1]
        max_boxes: maximum number of boxes to keep
        iou_thresh: IoU threshold above which a box is suppressed
    Returns the list of kept indices (highest score first).
    """
    assert boxes.shape[1] == 4 and len(scores.shape) == 1

    xmin, ymin = boxes[:, 0], boxes[:, 1]
    xmax, ymax = boxes[:, 2], boxes[:, 3]
    areas = (xmax - xmin) * (ymax - ymin)

    # candidate indices sorted by descending score
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        best = order[0]
        keep.append(best)
        rest = order[1:]

        # overlap of the current best box with every remaining candidate
        ix1 = np.maximum(xmin[best], xmin[rest])
        iy1 = np.maximum(ymin[best], ymin[rest])
        ix2 = np.minimum(xmax[best], xmax[rest])
        iy2 = np.minimum(ymax[best], ymax[rest])

        # +1 follows the legacy integer-pixel convention used here
        iw = np.maximum(0.0, ix2 - ix1 + 1)
        ih = np.maximum(0.0, iy2 - iy1 + 1)
        inter = iw * ih
        overlap = inter / (areas[best] + areas[rest] - inter)

        # keep only candidates that do not overlap the best box too much
        order = rest[np.where(overlap <= iou_thresh)[0]]

    return keep[:max_boxes]


def calc_iou(pred_boxes, true_boxes):
    '''
    Pairwise IoU matrix computed with numpy broadcasting.
    shape_info: pred_boxes: [N, 4]
                true_boxes: [V, 4]
    return: IoU matrix of shape [N, V]
    '''
    preds = np.expand_dims(pred_boxes, -2)  # [N, 1, 4]
    trues = np.expand_dims(true_boxes, 0)   # [1, V, 4]

    # [N, 1, 2] & [1, V, 2] ==> [N, V, 2]
    overlap_min = np.maximum(preds[..., :2], trues[..., :2])
    overlap_max = np.minimum(preds[..., 2:], trues[..., 2:])
    overlap_wh = np.maximum(overlap_max - overlap_min, 0.)

    # [N, V]
    overlap_area = overlap_wh[..., 0] * overlap_wh[..., 1]

    pred_wh = preds[..., 2:] - preds[..., :2]   # [N, 1, 2]
    true_wh = trues[..., 2:] - trues[..., :2]   # [1, V, 2]
    pred_area = pred_wh[..., 0] * pred_wh[..., 1]  # [N, 1]
    true_area = true_wh[..., 0] * true_wh[..., 1]  # [1, V]

    # epsilon keeps degenerate (zero-area) pairs from dividing by zero
    return overlap_area / (pred_area + true_area - overlap_area + 1e-10)
def evaluate_on_cpu(y_pred, y_true, num_classes, calc_now=True, max_boxes=50, score_thresh=0.5, iou_thresh=0.5):
    '''
    Given y_pred and y_true of a batch of data, get the recall and precision of the current batch.
    NOTE(review): near-duplicate of evaluate_on_gpu below — only the NMS call
    differs (cpu_nms here vs. a sess.run of the gpu nms op there).
    '''

    num_images = y_true[0].shape[0]
    true_labels_dict = {i: 0 for i in range(num_classes)}  # {class: count}
    pred_labels_dict = {i: 0 for i in range(num_classes)}
    true_positive_dict = {i: 0 for i in range(num_classes)}

    for i in range(num_images):
        # gather ground truth for image i across the three feature maps
        true_labels_list, true_boxes_list = [], []
        for j in range(3):  # three feature maps
            # shape: [13, 13, 3, 80]
            true_probs_temp = y_true[j][i][..., 5:-1]
            # shape: [13, 13, 3, 4] (x_center, y_center, w, h)
            true_boxes_temp = y_true[j][i][..., 0:4]

            # [13, 13, 3] cells holding at least one one-hot class
            object_mask = true_probs_temp.sum(axis=-1) > 0

            # [V, 3] V: Ground truth number of the current image
            true_probs_temp = true_probs_temp[object_mask]
            # [V, 4]
            true_boxes_temp = true_boxes_temp[object_mask]

            # [V], labels
            true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist()
            # [V, 4] (x_center, y_center, w, h)
            true_boxes_list += true_boxes_temp.tolist()

        if len(true_labels_list) != 0:
            for cls, count in Counter(true_labels_list).items():
                true_labels_dict[cls] += count

        # [V, 4] converted from center/size to corner (xmin, ymin, xmax, ymax)
        # NOTE(review): presumably every image has at least one GT box; an
        # empty true_boxes_list would make np.array() 1-D and break the
        # slicing below — confirm against the dataset assumptions.
        true_boxes = np.array(true_boxes_list)
        box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4]
        true_boxes[:, 0:2] = box_centers - box_sizes / 2.
        true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes

        # [1, xxx, 4] keep a leading batch dim of 1 for the NMS helpers
        pred_boxes = y_pred[0][i:i + 1]
        pred_confs = y_pred[1][i:i + 1]
        pred_probs = y_pred[2][i:i + 1]

        # pred_boxes: [N, 4]
        # pred_confs: [N]
        # pred_labels: [N]
        # N: Detected box number of the current image
        pred_boxes, pred_confs, pred_labels = cpu_nms(pred_boxes, pred_confs * pred_probs, num_classes,
                                                      max_boxes=max_boxes, score_thresh=score_thresh, iou_thresh=iou_thresh)

        # len: N
        pred_labels_list = [] if pred_labels is None else pred_labels.tolist()
        if pred_labels_list == []:
            continue

        # calc iou
        # [N, V]
        iou_matrix = calc_iou(pred_boxes, true_boxes)
        # [N] best-matching GT per prediction
        max_iou_idx = np.argmax(iou_matrix, axis=-1)

        # greedy matching: each GT may be claimed by at most one prediction;
        # a higher-confidence prediction can steal a previously claimed GT
        correct_idx = []
        correct_conf = []
        for k in range(max_iou_idx.shape[0]):
            pred_labels_dict[pred_labels_list[k]] += 1
            match_idx = max_iou_idx[k]  # V level
            if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]:
                if match_idx not in correct_idx:
                    correct_idx.append(match_idx)
                    correct_conf.append(pred_confs[k])
                else:
                    same_idx = correct_idx.index(match_idx)
                    if pred_confs[k] > correct_conf[same_idx]:
                        correct_idx.pop(same_idx)
                        correct_conf.pop(same_idx)
                        correct_idx.append(match_idx)
                        correct_conf.append(pred_confs[k])

        for t in correct_idx:
            true_positive_dict[true_labels_list[t]] += 1

    if calc_now:
        # avoid divided by 0
        recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6)
        precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6)

        return recall, precision
    else:
        # caller accumulates the raw per-class counts across batches
        return true_positive_dict, true_labels_dict, pred_labels_dict


def evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag,
                    y_pred, y_true, num_classes, iou_thresh=0.5, calc_now=True):
    '''
    Given y_pred and y_true of a batch of data, get the recall and precision of the current batch.
    This function will perform the NMS operation on the GPU via `sess.run`.
    NOTE(review): mirrors evaluate_on_cpu above except for the NMS call.
    '''

    num_images = y_true[0].shape[0]
    true_labels_dict = {i: 0 for i in range(num_classes)}  # {class: count}
    pred_labels_dict = {i: 0 for i in range(num_classes)}
    true_positive_dict = {i: 0 for i in range(num_classes)}

    for i in range(num_images):
        # gather ground truth for image i across the three feature maps
        true_labels_list, true_boxes_list = [], []
        for j in range(3):  # three feature maps
            # shape: [13, 13, 3, 80]
            true_probs_temp = y_true[j][i][..., 5:-1]
            # shape: [13, 13, 3, 4] (x_center, y_center, w, h)
            true_boxes_temp = y_true[j][i][..., 0:4]

            # [13, 13, 3]
            object_mask = true_probs_temp.sum(axis=-1) > 0

            # [V, 80] V: Ground truth number of the current image
            true_probs_temp = true_probs_temp[object_mask]
            # [V, 4]
            true_boxes_temp = true_boxes_temp[object_mask]

            # [V], labels, each from 0 to 79
            true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist()
            # [V, 4] (x_center, y_center, w, h)
            true_boxes_list += true_boxes_temp.tolist()

        if len(true_labels_list) != 0:
            for cls, count in Counter(true_labels_list).items():
                true_labels_dict[cls] += count

        # [V, 4] converted from center/size to corner (xmin, ymin, xmax, ymax)
        true_boxes = np.array(true_boxes_list)
        box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4]
        true_boxes[:, 0:2] = box_centers - box_sizes / 2.
        true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes

        # [1, xxx, 4]
        pred_boxes = y_pred[0][i:i + 1]
        pred_confs = y_pred[1][i:i + 1]
        pred_probs = y_pred[2][i:i + 1]

        # pred_boxes: [N, 4]
        # pred_confs: [N]
        # pred_labels: [N]
        # N: Detected box number of the current image
        pred_boxes, pred_confs, pred_labels = sess.run(gpu_nms_op,
                                                       feed_dict={pred_boxes_flag: pred_boxes,
                                                                  pred_scores_flag: pred_confs * pred_probs})
        # len: N
        pred_labels_list = [] if pred_labels is None else pred_labels.tolist()
        if pred_labels_list == []:
            continue

        # calc iou
        # [N, V]
        iou_matrix = calc_iou(pred_boxes, true_boxes)
        # [N]
        max_iou_idx = np.argmax(iou_matrix, axis=-1)

        # greedy matching, identical to the CPU variant above
        correct_idx = []
        correct_conf = []
        for k in range(max_iou_idx.shape[0]):
            pred_labels_dict[pred_labels_list[k]] += 1
            match_idx = max_iou_idx[k]  # V level
            if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]:
                if match_idx not in correct_idx:
                    correct_idx.append(match_idx)
                    correct_conf.append(pred_confs[k])
                else:
                    same_idx = correct_idx.index(match_idx)
                    if pred_confs[k] > correct_conf[same_idx]:
                        correct_idx.pop(same_idx)
                        correct_conf.pop(same_idx)
                        correct_idx.append(match_idx)
                        correct_conf.append(pred_confs[k])

        for t in correct_idx:
            true_positive_dict[true_labels_list[t]] += 1

    if calc_now:
        # avoid divided by 0
        recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6)
        precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6)

        return recall, precision
    else:
        return true_positive_dict, true_labels_dict, pred_labels_dict
def voc_eval(gt_dict, val_preds, classidx, iou_thres=0.5, use_07_metric=False):
    '''
    Top level function that does the PASCAL VOC evaluation for one class.
    params:
        gt_dict: {img_id: [[x_min, y_min, x_max, y_max, label], ...]}
        val_preds: iterable of [img_id, x_min, y_min, x_max, y_max, score, label]
        classidx: the class index to evaluate
        iou_thres: IoU above which a detection may count as a true positive
        use_07_metric: use the VOC07 11-point AP instead of the exact AP
    returns:
        (npos, nd, recall, precision, ap); (1e-6, 1e-6, 0, 0, 0) when the
        class has no predictions at all.
    '''
    # 1. obtain gt: extract all gt objects for this class
    class_recs = {}
    npos = 0
    for img_id in gt_dict:
        R = [obj for obj in gt_dict[img_id] if obj[-1] == classidx]
        bbox = np.array([x[:4] for x in R])
        det = [False] * len(R)  # per-GT "already matched" flags
        npos += len(R)
        class_recs[img_id] = {'bbox': bbox, 'det': det}

    # 2. obtain pred results for this class
    pred = [x for x in val_preds if x[-1] == classidx]
    img_ids = [x[0] for x in pred]
    confidence = np.array([x[-2] for x in pred])
    BB = np.array([[x[1], x[2], x[3], x[4]] for x in pred])

    # 3. sort detections by descending confidence
    sorted_ind = np.argsort(-confidence)
    try:
        BB = BB[sorted_ind, :]
    except IndexError:
        # empty prediction array is 1-D, so the 2-D slice raises IndexError
        # (the original used a bare `except:` here, which also hid real bugs)
        print('no box, ignore')
        return 1e-6, 1e-6, 0, 0, 0
    img_ids = [img_ids[x] for x in sorted_ind]

    # 4. mark TPs and FPs, greedily matching each detection to the best GT
    nd = len(img_ids)
    tp = np.zeros(nd)
    fp = np.zeros(nd)

    for d in range(nd):
        # all the gt info in some image
        R = class_recs[img_ids[d]]
        bb = BB[d, :]
        ovmax = -np.Inf
        BBGT = R['bbox']

        if BBGT.size > 0:
            # calc iou
            # intersection (with the VOC +1 pixel convention)
            ixmin = np.maximum(BBGT[:, 0], bb[0])
            iymin = np.maximum(BBGT[:, 1], bb[1])
            ixmax = np.minimum(BBGT[:, 2], bb[2])
            iymax = np.minimum(BBGT[:, 3], bb[3])
            iw = np.maximum(ixmax - ixmin + 1., 0.)
            ih = np.maximum(iymax - iymin + 1., 0.)
            inters = iw * ih

            # union
            uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + (BBGT[:, 2] - BBGT[:, 0] + 1.) * (
                    BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)

            overlaps = inters / uni
            ovmax = np.max(overlaps)
            jmax = np.argmax(overlaps)

        if ovmax > iou_thres:
            # gt not matched yet -> true positive; otherwise duplicate -> FP
            if not R['det'][jmax]:
                tp[d] = 1.
                R['det'][jmax] = 1
            else:
                fp[d] = 1.
        else:
            fp[d] = 1.

    # compute precision recall over the ranked detections
    fp = np.cumsum(fp)
    tp = np.cumsum(tp)
    rec = tp / float(npos)
    # avoid divide by zero in case the first detection matches a difficult
    # ground truth
    prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
    ap = voc_ap(rec, prec, use_07_metric)

    # return rec, prec, ap
    return npos, nd, tp[-1] / float(npos), tp[-1] / float(nd), ap


gt_dict = {}  # key: img_id, value: gt object list (module-level cache)
_gt_cache_key = None  # (filename, target size, letterbox flag) the cache was built for


def parse_gt_rec(gt_filename, target_img_size, letterbox_resize=True):
    '''
    Parse and re-organize the gt info, rescaled to the network input size.
    The result is cached at module level because evaluation calls this every
    epoch with identical arguments.
    return:
        gt_dict: dict. Each key is an img_id, the value is the list of gt
        bboxes ([x_min, y_min, x_max, y_max, label]) in the corresponding img.
    '''
    global gt_dict, _gt_cache_key

    cache_key = (gt_filename, tuple(target_img_size), bool(letterbox_resize))
    # Bug fix: the original rebuilt only when the cache was empty, so calling
    # with a different annotation file (or target size) silently returned
    # stale data. Rebuild whenever any of the inputs change.
    if not gt_dict or _gt_cache_key != cache_key:
        gt_dict = {}
        new_width, new_height = target_img_size
        with open(gt_filename, 'r') as f:
            for line in f:
                img_id, pic_path, boxes, labels, ori_width, ori_height = parse_line(line)

                objects = []
                for i in range(len(labels)):
                    x_min, y_min, x_max, y_max = boxes[i]
                    label = labels[i]

                    if letterbox_resize:
                        # same geometry as letterbox_resize(): scale then pad
                        resize_ratio = min(new_width / ori_width, new_height / ori_height)

                        resize_w = int(resize_ratio * ori_width)
                        resize_h = int(resize_ratio * ori_height)

                        dw = int((new_width - resize_w) / 2)
                        dh = int((new_height - resize_h) / 2)

                        objects.append([x_min * resize_ratio + dw,
                                        y_min * resize_ratio + dh,
                                        x_max * resize_ratio + dw,
                                        y_max * resize_ratio + dh,
                                        label])
                    else:
                        objects.append([x_min * new_width / ori_width,
                                        y_min * new_height / ori_height,
                                        x_max * new_width / ori_width,
                                        y_max * new_height / ori_height,
                                        label])
                gt_dict[img_id] = objects
        _gt_cache_key = cache_key
    return gt_dict


def gpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, nms_thresh=0.5):
    """
    Build the TensorFlow graph ops that perform per-class NMS on GPU.

    params:
        boxes: tensor of shape [1, 10647, 4] # 10647=(13*13+26*26+52*52)*3, for input 416*416 image
        scores: tensor of shape [1, 10647, num_classes], score=conf*prob
        num_classes: total number of classes
        max_boxes: integer, maximum number of predicted boxes you'd like, default is 50
        score_thresh: if [ highest class probability score < score_threshold]
            then get rid of the corresponding box
        nms_thresh: real value, "intersection over union" threshold used for NMS filtering
    returns:
        (boxes, score, label) tensors concatenated over classes.
    """
    boxes_list, label_list, score_list = [], [], []
    max_boxes = tf.constant(max_boxes, dtype='int32')

    # since we do nms for a single image, flatten the batch dimension away
    boxes = tf.reshape(boxes, [-1, 4])  # '-1' means we don't know the exact number of boxes
    score = tf.reshape(scores, [-1, num_classes])

    # Step 1: Create a filtering mask based on "box_class_scores" by using "threshold".
    mask = tf.greater_equal(score, tf.constant(score_thresh))
    # Step 2: Do non_max_suppression for each class
    for i in range(num_classes):
        # Step 3: Apply the mask to scores and boxes and pick them out
        filter_boxes = tf.boolean_mask(boxes, mask[:, i])
        filter_score = tf.boolean_mask(score[:, i], mask[:, i])
        nms_indices = tf.image.non_max_suppression(boxes=filter_boxes,
                                                   scores=filter_score,
                                                   max_output_size=max_boxes,
                                                   iou_threshold=nms_thresh, name='nms_indices')
        label_list.append(tf.ones_like(tf.gather(filter_score, nms_indices), 'int32') * i)
        boxes_list.append(tf.gather(filter_boxes, nms_indices))
        score_list.append(tf.gather(filter_score, nms_indices))

    boxes = tf.concat(boxes_list, axis=0)
    score = tf.concat(score_list, axis=0)
    label = tf.concat(label_list, axis=0)

    return boxes, score, label
def get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, image_ids, y_pred):
    '''
    Given the y_pred of an input image, run the GPU NMS op and collect the
    predicted bbox and label info.
    return:
        pred_content: 2d list of [image_id, x_min, y_min, x_max, y_max, score, label].
    '''
    image_id = image_ids[0]

    # keep the first dimension 1 (the nms op expects a batch of one)
    pred_boxes = y_pred[0][0:1]
    pred_confs = y_pred[1][0:1]
    pred_probs = y_pred[2][0:1]

    boxes, scores, labels = sess.run(gpu_nms_op,
                                     feed_dict={pred_boxes_flag: pred_boxes,
                                                pred_scores_flag: pred_confs * pred_probs})

    pred_content = []
    for i in range(len(labels)):
        x_min, y_min, x_max, y_max = boxes[i]
        score = scores[i]
        label = labels[i]
        pred_content.append([image_id, x_min, y_min, x_max, y_max, score, label])

    return pred_content


def voc_ap(rec, prec, use_07_metric=False):
    """Compute VOC AP given precision and recall. If use_07_metric is true, uses
    the VOC 07 11-point method (default: False).
    """
    if use_07_metric:
        # 11 point metric: mean of max precision at recall >= t for t in 0..1
        ap = 0.
        for t in np.arange(0., 1.1, 0.1):
            if np.sum(rec >= t) == 0:
                p = 0
            else:
                p = np.max(prec[rec >= t])
            ap = ap + p / 11.
    else:
        # correct AP calculation
        # first append sentinel values at the end
        mrec = np.concatenate(([0.], rec, [1.]))
        mpre = np.concatenate(([0.], prec, [0.]))

        # compute the precision envelope (monotonically non-increasing)
        for i in range(mpre.size - 1, 0, -1):
            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])

        # to calculate area under PR curve, look for points
        # where X axis (recall) changes value
        i = np.where(mrec[1:] != mrec[:-1])[0]

        # and sum (\Delta recall) * prec
        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
    return ap


def shuffle_and_overwrite(file_name):
    """Shuffle the lines of `file_name` in place (used to reshuffle the
    training list between epochs).

    Bug fix: the original read via `open(file_name, 'r').readlines()` without
    ever closing the handle; both file accesses now use context managers.
    """
    with open(file_name, 'r') as f:
        content = f.readlines()
    random.shuffle(content)
    with open(file_name, 'w') as f:
        f.writelines(content)
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -0,0 +1,260 @@ +# coding: utf-8 + +from __future__ import division, print_function + +import tensorflow as tf +import numpy as np +import logging +from tqdm import trange +import random + +import args + +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ + shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ + evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 + +### parse some params +anchors = parse_anchors(anchor_path) +classes = read_class_names(class_name_path) +class_num = len(classes) +train_img_cnt = len(open(train_file, 'r').readlines()) +val_img_cnt = len(open(val_file, 'r').readlines()) +train_batch_num = int(math.ceil(float(train_img_cnt) / batch_size)) + +lr_decay_freq = int(train_batch_num * lr_decay_epoch) +pw_boundaries = [float(i) * train_batch_num + global_step for i in pw_boundaries] + + +# setting loggers +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', + datefmt='%a, %d %b %Y %H:%M:%S', filename=args.progress_log_path, filemode='w') + +# setting placeholders +is_training = tf.placeholder(tf.bool, name="phase_train") +handle_flag = tf.placeholder(tf.string, [], name='iterator_handle_flag') +# register the gpu nms operation here for the following evaluation scheme +pred_boxes_flag = tf.placeholder(tf.float32, [1, None, None]) +pred_scores_flag = tf.placeholder(tf.float32, [1, None, None]) +gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, args.class_num, args.nms_topk, args.score_threshold, args.nms_threshold) + +################## +# tf.data pipeline +################## +train_dataset = tf.data.TextLineDataset(args.train_file) +train_dataset = train_dataset.shuffle(args.train_img_cnt) +train_dataset = 
train_dataset.batch(args.batch_size) +train_dataset = train_dataset.map( + lambda x: tf.py_func(get_batch_data, + inp=[x, args.class_num, args.img_size, args.anchors, 'train', args.multi_scale_train, args.use_mix_up, args.letterbox_resize], + Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), + num_parallel_calls=args.num_threads +) +train_dataset = train_dataset.prefetch(args.prefetech_buffer) + +val_dataset = tf.data.TextLineDataset(args.val_file) +val_dataset = val_dataset.batch(1) +val_dataset = val_dataset.map( + lambda x: tf.py_func(get_batch_data, + inp=[x, args.class_num, args.img_size, args.anchors, 'val', False, False, args.letterbox_resize], + Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), + num_parallel_calls=args.num_threads +) +val_dataset.prefetch(args.prefetech_buffer) + +iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes) +train_init_op = iterator.make_initializer(train_dataset) +val_init_op = iterator.make_initializer(val_dataset) + +# get an element from the chosen dataset iterator +image_ids, image, y_true_13, y_true_26, y_true_52 = iterator.get_next() +y_true = [y_true_13, y_true_26, y_true_52] + +# tf.data pipeline will lose the data `static` shape, so we need to set it manually +image_ids.set_shape([None]) +image.set_shape([None, None, None, 3]) +for y in y_true: + y.set_shape([None, None, None, None, None]) + +################## +# Model definition +################## +yolo_model = YoloV3(args.class_num, args.anchors, args.use_label_smooth, args.use_focal_loss, args.batch_norm_decay, args.weight_decay, use_static_shape=False) +with tf.variable_scope('yolov3'): + pred_feature_maps = yolo_model.forward(image, is_training=is_training) +loss = yolo_model.compute_loss(pred_feature_maps, y_true) +y_pred = yolo_model.predict(pred_feature_maps) + +l2_loss = tf.losses.get_regularization_loss() + +# setting restore parts and vars to update +saver_to_restore = 
tf.train.Saver(var_list=tf.contrib.framework.get_variables_to_restore(include=args.restore_include, exclude=args.restore_exclude)) +update_vars = tf.contrib.framework.get_variables_to_restore(include=args.update_part) + +tf.summary.scalar('train_batch_statistics/total_loss', loss[0]) +tf.summary.scalar('train_batch_statistics/loss_xy', loss[1]) +tf.summary.scalar('train_batch_statistics/loss_wh', loss[2]) +tf.summary.scalar('train_batch_statistics/loss_conf', loss[3]) +tf.summary.scalar('train_batch_statistics/loss_class', loss[4]) +tf.summary.scalar('train_batch_statistics/loss_l2', l2_loss) +tf.summary.scalar('train_batch_statistics/loss_ratio', l2_loss / loss[0]) + +global_step = tf.Variable(float(args.global_step), trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) +if args.use_warm_up: + learning_rate = tf.cond(tf.less(global_step, args.train_batch_num * args.warm_up_epoch), + lambda: args.learning_rate_init * global_step / (args.train_batch_num * args.warm_up_epoch), + lambda: config_learning_rate(args, global_step - args.train_batch_num * args.warm_up_epoch)) +else: + learning_rate = config_learning_rate(args, global_step) +tf.summary.scalar('learning_rate', learning_rate) + +if not args.save_optimizer: + saver_to_save = tf.train.Saver() + saver_best = tf.train.Saver() + +optimizer = config_optimizer(args.optimizer_name, learning_rate) + +# set dependencies for BN ops +update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) +with tf.control_dependencies(update_ops): + # train_op = optimizer.minimize(loss[0] + l2_loss, var_list=update_vars, global_step=global_step) + # apply gradient clip to avoid gradient exploding + gvs = optimizer.compute_gradients(loss[0] + l2_loss, var_list=update_vars) + clip_grad_var = [gv if gv[0] is None else [ + tf.clip_by_norm(gv[0], 100.), gv[1]] for gv in gvs] + train_op = optimizer.apply_gradients(clip_grad_var, global_step=global_step) + +if args.save_optimizer: + print('Saving optimizer parameters to checkpoint! 
Remember to restore the global_step in the fine-tuning afterwards.') + saver_to_save = tf.train.Saver() + saver_best = tf.train.Saver() + +with tf.Session() as sess: + sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) + saver_to_restore.restore(sess, args.restore_path) + merged = tf.summary.merge_all() + writer = tf.summary.FileWriter(args.log_dir, sess.graph) + + print('\n----------- start to train -----------\n') + + best_mAP = -np.Inf + + for epoch in range(args.total_epoches): + + sess.run(train_init_op) + loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter() + + for i in trange(args.train_batch_num): + _, summary, __y_pred, __y_true, __loss, __global_step, __lr = sess.run( + [train_op, merged, y_pred, y_true, loss, global_step, learning_rate], + feed_dict={is_training: True}) + + writer.add_summary(summary, global_step=__global_step) + + loss_total.update(__loss[0], len(__y_pred[0])) + loss_xy.update(__loss[1], len(__y_pred[0])) + loss_wh.update(__loss[2], len(__y_pred[0])) + loss_conf.update(__loss[3], len(__y_pred[0])) + loss_class.update(__loss[4], len(__y_pred[0])) + + if __global_step % args.train_evaluation_step == 0 and __global_step > 0: + # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.class_num, args.nms_topk, args.score_threshold, args.nms_threshold) + recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __y_pred, __y_true, args.class_num, args.nms_threshold) + + info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( + epoch, int(__global_step), loss_total.average, loss_xy.average, loss_wh.average, loss_conf.average, loss_class.average) + info += 'Last batch: rec: {:.3f}, prec: {:.3f} | lr: {:.5g}'.format(recall, precision, __lr) + print(info) + logging.info(info) + + 
writer.add_summary(make_summary('evaluation/train_batch_recall', recall), global_step=__global_step) + writer.add_summary(make_summary('evaluation/train_batch_precision', precision), global_step=__global_step) + + if np.isnan(loss_total.average): + print('****' * 10) + raise ArithmeticError( + 'Gradient exploded! Please train again and you may need modify some parameters.') + + # NOTE: this is just demo. You can set the conditions when to save the weights. + if epoch % args.save_epoch == 0 and epoch > 0: + if loss_total.average <= 2.: + saver_to_save.save(sess, args.save_dir + 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr)) + + # switch to validation dataset for evaluation + if epoch % args.val_evaluation_epoch == 0 and epoch >= args.warm_up_epoch: + sess.run(val_init_op) + + val_loss_total, val_loss_xy, val_loss_wh, val_loss_conf, val_loss_class = \ + AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter() + + val_preds = [] + + for j in trange(args.val_img_cnt): + __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], + feed_dict={is_training: False}) + pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) + val_preds.extend(pred_content) + val_loss_total.update(__loss[0]) + val_loss_xy.update(__loss[1]) + val_loss_wh.update(__loss[2]) + val_loss_conf.update(__loss[3]) + val_loss_class.update(__loss[4]) + + # calc mAP + rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() + gt_dict = parse_gt_rec(args.val_file, args.img_size, args.letterbox_resize) + + info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) + + for ii in range(args.class_num): + npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=args.eval_threshold, use_07_metric=args.use_voc_07_metric) + info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: 
{:.4f}\n'.format(ii, rec, prec, ap) + rec_total.update(rec, npos) + prec_total.update(prec, nd) + ap_total.update(ap, 1) + + mAP = ap_total.average + info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) + info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( + val_loss_total.average, val_loss_xy.average, val_loss_wh.average, val_loss_conf.average, val_loss_class.average) + print(info) + logging.info(info) + + if mAP > best_mAP: + best_mAP = mAP + saver_best.save(sess, args.save_dir + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format( + epoch, int(__global_step), best_mAP, val_loss_total.average, __lr)) + + writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) + writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) + writer.add_summary(make_summary('evaluation/val_precision', prec_total.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/total_loss', val_loss_total.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_xy', val_loss_xy.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_class', val_loss_class.average), global_step=epoch) + + +### Load and finetune +# Choose the parts you want to restore the weights. List form. 
+# restore_include: None, restore_exclude: None => restore the whole model +# restore_include: None, restore_exclude: scope => restore the whole model except `scope` +# restore_include: scope1, restore_exclude: scope2 => if scope1 contains scope2, restore scope1 and not restore scope2 (scope1 - scope2) +# choise 1: only restore the darknet body +# restore_include = ['yolov3/darknet53_body'] +# restore_exclude = None +# choise 2: restore all layers except the last 3 conv2d layers in 3 scale + restore_include = None + restore_exclude = ['yolov3/yolov3_head/Conv_14', 'yolov3/yolov3_head/Conv_6', 'yolov3/yolov3_head/Conv_22'] +# Choose the parts you want to finetune. List form. +# Set to None to train the whole model. + update_part = ['yolov3/yolov3_head'] + +### other training strategies + multi_scale_train = True # Whether to apply multi-scale training strategy. Image size varies from [320, 320] to [640, 640] by default. + use_label_smooth = True # Whether to use class label smoothing strategy. + use_focal_loss = True # Whether to apply focal loss on the conf loss. + use_mix_up = True # Whether to use mix up data augmentation strategy. + use_warm_up = True # whether to use warm up strategy to prevent from gradient exploding. + warm_up_epoch = 3 # Warm up training epoches. Set to a larger value if gradient explodes. 
\ No newline at end of file From 87c664ccad67016f8a935f75f18c002e7e8e3884 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:37:16 +0100 Subject: [PATCH 16/65] madea few changes to sam's script --- .../d04_modelling/transfer_learning/training_data_loader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 303a2e8..d000127 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -1,6 +1,7 @@ import xml.etree.ElementTree as ET from PIL import Image import numpy as np +from enum import Enum from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 @@ -8,7 +9,6 @@ from traffic_analysis.d02_ref.ref_utils import get_s3_video_path_from_xml_name -from enum import Enum class TransferDataset(Enum): detrac = 1 cvat = 2 @@ -246,6 +246,6 @@ def parse_cvat_xml_file(self, xml_file): paths = load_paths() creds = load_credentials() -dl = DataLoader(datasets=[TransferDataset.cvat, TransferDataset.detrac], creds=creds, paths=paths) +dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) print('Done') From dab965275b1a166460bc36586b78264f57830bae Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 15:38:51 +0100 Subject: [PATCH 17/65] added function to load training params --- src/traffic_analysis/d00_utils/load_confs.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/traffic_analysis/d00_utils/load_confs.py b/src/traffic_analysis/d00_utils/load_confs.py index 38828ae..a7d93b1 100644 --- a/src/traffic_analysis/d00_utils/load_confs.py +++ 
b/src/traffic_analysis/d00_utils/load_confs.py @@ -24,6 +24,12 @@ def load_app_parameters(): return {**params['visualization']} +def load_training_parameters(): + with open(project_dir + '/conf/base/training_parameters.yml') as f: + params = yaml.safe_load(f) + return collapse_dict_hierarchy(params) + + def load_credentials(): with open(project_dir + '/conf/local/credentials.yml') as f: From 483f86c2f3b59c69f6716dea9de5c0be18654c4b Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 16:44:08 +0100 Subject: [PATCH 18/65] converted everything away from args instead to the training_parameters yml --- conf/base/parameters.yml | 1 + conf/base/paths.yml | 1 + conf/base/training_parameters.yml | 1 - .../train_tensorflow_model.py | 169 ++++++++---------- 4 files changed, 81 insertions(+), 91 deletions(-) diff --git a/conf/base/parameters.yml b/conf/base/parameters.yml index 672ded5..b674aa5 100644 --- a/conf/base/parameters.yml +++ b/conf/base/parameters.yml @@ -42,6 +42,7 @@ modelling: detection_iou_threshold: 0.05 detection_confidence_threshold: 0.2 detection_nms_threshold: 0.2 + trained_model_name: "yolov3_ldn_traffic" # tracking selected_labels: ["car", "truck", "bus", "motorbike"] diff --git a/conf/base/paths.yml b/conf/base/paths.yml index fbfa4a8..b247d87 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -23,6 +23,7 @@ local_paths: plots: "plots/" annotations: "annotations/" local_detection_model: "data/ref/detection_model/" + training: "data/ref/training/" db_paths: db_host: "dssg-london.ck0oseycrr7s.eu-west-2.rds.amazonaws.com" diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 66f7899..3215e89 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,6 +1,5 @@ training: batch_size : 6 - img_size : [416, 416] # Images will be resized to `img_size` and fed to the network, size format: [width, height] letterbox_resize : True # Whether to use the letterbox resize, 
i.e., keep the original aspect ratio in the resized image. total_epoches : 100 train_evaluation_step : 100 # Evaluate on the training batch after some steps. diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 4e5a274..61691e3 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -1,67 +1,71 @@ # coding: utf-8 from __future__ import division, print_function - +import os +import math import tensorflow as tf import numpy as np import logging from tqdm import trange import random -import args - from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 +from traffic_analysis.d00_utils.load_confs import load_parameters, load_paths, load_training_parameters +from traffic_analysis.d04_modelling.perform_detection_tensorflow import parse_anchors, read_class_names + -### parse some params -anchors = parse_anchors(anchor_path) +params = load_parameters() +train_params = load_training_parameters() +paths = load_paths() +train_dir_path = paths['training'] +class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS classes = read_class_names(class_name_path) -class_num = len(classes) -train_img_cnt = len(open(train_file, 'r').readlines()) -val_img_cnt = len(open(val_file, 'r').readlines()) -train_batch_num = int(math.ceil(float(train_img_cnt) / batch_size)) +anchors = parse_anchors(paths) +number_classes = len(classes) -lr_decay_freq = int(train_batch_num * lr_decay_epoch) -pw_boundaries = [float(i) 
* train_batch_num + global_step for i in pw_boundaries] +train_data_path = os.path.join(train_dir_path, 'training_data.txt') +val_data_path = os.path.join(train_dir_path, 'val_data.txt') +train_img_cnt = len(open(train_data_path, 'r').readlines()) +val_img_cnt = len(open(val_data_path, 'r').readlines()) +train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) +lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) -# setting loggers +if train_params['lr_type'] == 'piecewise': + pw_boundaries = [float(i) * train_batch_num + train_params['global_step'] for i in train_params['pw_boundaries']] + +logging_file_path = os.path.join(train_dir_path, 'progress.log') logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', - datefmt='%a, %d %b %Y %H:%M:%S', filename=args.progress_log_path, filemode='w') + datefmt='%a, %d %b %Y %H:%M:%S', filename=logging_file_path, filemode='w') -# setting placeholders is_training = tf.placeholder(tf.bool, name="phase_train") handle_flag = tf.placeholder(tf.string, [], name='iterator_handle_flag') -# register the gpu nms operation here for the following evaluation scheme pred_boxes_flag = tf.placeholder(tf.float32, [1, None, None]) pred_scores_flag = tf.placeholder(tf.float32, [1, None, None]) -gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, args.class_num, args.nms_topk, args.score_threshold, args.nms_threshold) - -################## -# tf.data pipeline -################## -train_dataset = tf.data.TextLineDataset(args.train_file) -train_dataset = train_dataset.shuffle(args.train_img_cnt) -train_dataset = train_dataset.batch(args.batch_size) +gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], + train_params['score_threshold'], train_params['nms_threshold']) + +train_dataset = tf.data.TextLineDataset(train_dir_path) +train_dataset = train_dataset.shuffle(train_img_cnt) +train_dataset = 
train_dataset.batch(train_params['batch_size']) train_dataset = train_dataset.map( lambda x: tf.py_func(get_batch_data, - inp=[x, args.class_num, args.img_size, args.anchors, 'train', args.multi_scale_train, args.use_mix_up, args.letterbox_resize], + inp=[x, number_classes, [416, 416], anchors, 'train', True, True, True], Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), - num_parallel_calls=args.num_threads -) -train_dataset = train_dataset.prefetch(args.prefetech_buffer) + num_parallel_calls=train_params['num_threads']) +train_dataset = train_dataset.prefetch(train_params['prefetech_buffer']) -val_dataset = tf.data.TextLineDataset(args.val_file) +val_dataset = tf.data.TextLineDataset(val_data_path) val_dataset = val_dataset.batch(1) val_dataset = val_dataset.map( lambda x: tf.py_func(get_batch_data, - inp=[x, args.class_num, args.img_size, args.anchors, 'val', False, False, args.letterbox_resize], + inp=[x, number_classes, [416, 416], anchors, 'val', False, False, True], Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), - num_parallel_calls=args.num_threads -) -val_dataset.prefetch(args.prefetech_buffer) + num_parallel_calls=train_params['num_threads']) +val_dataset.prefetch(train_params['prefetech_buffer']) iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes) train_init_op = iterator.make_initializer(train_dataset) @@ -77,10 +81,11 @@ for y in y_true: y.set_shape([None, None, None, None, None]) -################## -# Model definition -################## -yolo_model = YoloV3(args.class_num, args.anchors, args.use_label_smooth, args.use_focal_loss, args.batch_norm_decay, args.weight_decay, use_static_shape=False) +# define model +yolo_model = YoloV3(number_classes, anchors, use_label_smooth=True, use_focal_loss=True, + batch_norm_decay=train_params['batch_norm_decay'], weight_decay=train_params['weight_decay'], + use_static_shape=False) + with tf.variable_scope('yolov3'): 
pred_feature_maps = yolo_model.forward(image, is_training=is_training) loss = yolo_model.compute_loss(pred_feature_maps, y_true) @@ -89,8 +94,8 @@ l2_loss = tf.losses.get_regularization_loss() # setting restore parts and vars to update -saver_to_restore = tf.train.Saver(var_list=tf.contrib.framework.get_variables_to_restore(include=args.restore_include, exclude=args.restore_exclude)) -update_vars = tf.contrib.framework.get_variables_to_restore(include=args.update_part) +saver_to_restore = tf.train.Saver(var_list=tf.contrib.framework.get_variables_to_restore(include=None, exclude=None)) +update_vars = tf.contrib.framework.get_variables_to_restore(include=['yolov3/yolov3_head']) tf.summary.scalar('train_batch_statistics/total_loss', loss[0]) tf.summary.scalar('train_batch_statistics/loss_xy', loss[1]) @@ -100,20 +105,21 @@ tf.summary.scalar('train_batch_statistics/loss_l2', l2_loss) tf.summary.scalar('train_batch_statistics/loss_ratio', l2_loss / loss[0]) -global_step = tf.Variable(float(args.global_step), trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) -if args.use_warm_up: - learning_rate = tf.cond(tf.less(global_step, args.train_batch_num * args.warm_up_epoch), - lambda: args.learning_rate_init * global_step / (args.train_batch_num * args.warm_up_epoch), - lambda: config_learning_rate(args, global_step - args.train_batch_num * args.warm_up_epoch)) -else: - learning_rate = config_learning_rate(args, global_step) +global_step = tf.Variable(float(train_params['global_step']), + trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) + +learning_rate = tf.cond(tf.less(global_step, train_params['train_batch_num'] * train_params['warm_up_epoch']), + lambda: train_params['learning_rate_init'] * + global_step / (train_params['train_batch_num'] * train_params['warm_up_epoch']), + lambda: config_learning_rate(global_step - + train_params['train_batch_num'] * train_params['warm_up_epoch'])) tf.summary.scalar('learning_rate', learning_rate) -if not 
args.save_optimizer: +if not train_params['save_optimizer']: saver_to_save = tf.train.Saver() saver_best = tf.train.Saver() -optimizer = config_optimizer(args.optimizer_name, learning_rate) +optimizer = config_optimizer(train_params['optimizer_name'], learning_rate) # set dependencies for BN ops update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) @@ -125,27 +131,30 @@ tf.clip_by_norm(gv[0], 100.), gv[1]] for gv in gvs] train_op = optimizer.apply_gradients(clip_grad_var, global_step=global_step) -if args.save_optimizer: +if train_params['save_optimizer']: print('Saving optimizer parameters to checkpoint! Remember to restore the global_step in the fine-tuning afterwards.') saver_to_save = tf.train.Saver() saver_best = tf.train.Saver() +tensorboard_log_path = os.path.join(train_dir_path, 'tensorboard_logs') +yolov3_tensorflow_path = os.path.join(paths['local_detection_model'], params['detection_model'], 'yolov3.ckpt') with tf.Session() as sess: sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) - saver_to_restore.restore(sess, args.restore_path) + saver_to_restore.restore(sess, yolov3_tensorflow_path) merged = tf.summary.merge_all() - writer = tf.summary.FileWriter(args.log_dir, sess.graph) + writer = tf.summary.FileWriter(tensorboard_log_path, sess.graph) print('\n----------- start to train -----------\n') best_mAP = -np.Inf - for epoch in range(args.total_epoches): + for epoch in range(train_params['total_epoches']): sess.run(train_init_op) - loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter() + loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), \ + AverageMeter(), AverageMeter() - for i in trange(args.train_batch_num): + for i in trange(train_params['train_batch_num']): _, summary, __y_pred, __y_true, __loss, __global_step, __lr = sess.run( [train_op, merged, y_pred, y_true, loss, global_step, 
learning_rate], feed_dict={is_training: True}) @@ -158,11 +167,13 @@ loss_conf.update(__loss[3], len(__y_pred[0])) loss_class.update(__loss[4], len(__y_pred[0])) - if __global_step % args.train_evaluation_step == 0 and __global_step > 0: - # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.class_num, args.nms_topk, args.score_threshold, args.nms_threshold) - recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __y_pred, __y_true, args.class_num, args.nms_threshold) + if __global_step % train_params['train_evaluation_step'] == 0 and __global_step > 0: + # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.number_classes, args.nms_topk, args.score_threshold, args.nms_threshold) + recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, + __y_pred, __y_true, number_classes, train_params['nms_threshold']) - info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( + info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, " \ + "wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( epoch, int(__global_step), loss_total.average, loss_xy.average, loss_wh.average, loss_conf.average, loss_class.average) info += 'Last batch: rec: {:.3f}, prec: {:.3f} | lr: {:.5g}'.format(recall, precision, __lr) print(info) @@ -177,12 +188,13 @@ 'Gradient exploded! Please train again and you may need modify some parameters.') # NOTE: this is just demo. You can set the conditions when to save the weights. 
- if epoch % args.save_epoch == 0 and epoch > 0: + if epoch % train_params['save_epoch == 0'] and epoch > 0: if loss_total.average <= 2.: - saver_to_save.save(sess, args.save_dir + 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr)) + saver_to_save.save(sess, os.path.join(train_params['trained_model_name'], + 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr))) # switch to validation dataset for evaluation - if epoch % args.val_evaluation_epoch == 0 and epoch >= args.warm_up_epoch: + if epoch % train_params['val_evaluation_epoch'] == 0 and epoch >= train_params['warm_up_epoch']: sess.run(val_init_op) val_loss_total, val_loss_xy, val_loss_wh, val_loss_conf, val_loss_class = \ @@ -190,7 +202,7 @@ val_preds = [] - for j in trange(args.val_img_cnt): + for j in trange(val_img_cnt): __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], feed_dict={is_training: False}) pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) @@ -203,12 +215,13 @@ # calc mAP rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() - gt_dict = parse_gt_rec(args.val_file, args.img_size, args.letterbox_resize) + gt_dict = parse_gt_rec(val_data_path, [416, 416], letterbox_resize=True) info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) - for ii in range(args.class_num): - npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=args.eval_threshold, use_07_metric=args.use_voc_07_metric) + for ii in range(number_classes): + npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], + use_07_metric=False) info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) rec_total.update(rec, npos) prec_total.update(prec, nd) @@ -223,8 +236,8 @@ if mAP > best_mAP: 
best_mAP = mAP - saver_best.save(sess, args.save_dir + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format( - epoch, int(__global_step), best_mAP, val_loss_total.average, __lr)) + saver_best.save(sess, os.path.join(train_params['trained_model_name'], + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr)))_ writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) @@ -234,27 +247,3 @@ writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), global_step=epoch) writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), global_step=epoch) writer.add_summary(make_summary('validation_statistics/loss_class', val_loss_class.average), global_step=epoch) - - -### Load and finetune -# Choose the parts you want to restore the weights. List form. -# restore_include: None, restore_exclude: None => restore the whole model -# restore_include: None, restore_exclude: scope => restore the whole model except `scope` -# restore_include: scope1, restore_exclude: scope2 => if scope1 contains scope2, restore scope1 and not restore scope2 (scope1 - scope2) -# choise 1: only restore the darknet body -# restore_include = ['yolov3/darknet53_body'] -# restore_exclude = None -# choise 2: restore all layers except the last 3 conv2d layers in 3 scale - restore_include = None - restore_exclude = ['yolov3/yolov3_head/Conv_14', 'yolov3/yolov3_head/Conv_6', 'yolov3/yolov3_head/Conv_22'] -# Choose the parts you want to finetune. List form. -# Set to None to train the whole model. - update_part = ['yolov3/yolov3_head'] - -### other training strategies - multi_scale_train = True # Whether to apply multi-scale training strategy. Image size varies from [320, 320] to [640, 640] by default. 
- use_label_smooth = True # Whether to use class label smoothing strategy. - use_focal_loss = True # Whether to apply focal loss on the conf loss. - use_mix_up = True # Whether to use mix up data augmentation strategy. - use_warm_up = True # whether to use warm up strategy to prevent from gradient exploding. - warm_up_epoch = 3 # Warm up training epoches. Set to a larger value if gradient explodes. \ No newline at end of file From d0cb8c2770cdca37656ba2e9b0ed423823afc9e8 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 18:46:41 +0100 Subject: [PATCH 19/65] added lines to save a text file to be referenced --- .../transfer_learning/training_data_loader.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index d000127..a1cea0c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -54,7 +54,7 @@ def load_data_from_s3(self): xs += x ys += y - self.clear_temp_folders() + # self.clear_temp_folders() return xs, ys @@ -81,10 +81,13 @@ def load_detrac_data(self): file_to_download = paths['s3_detrac_images'] + \ folder + '/' + \ 'img' + image_num + '.jpg' + print(file_to_download) download_file_to = paths['temp_raw_images'] + \ folder + '_' + \ image_num + '.jpg' + print(download_file_to) + self.data_loader_s3.download_file( path_of_file_to_download=file_to_download, path_to_download_file_to=download_file_to) @@ -243,9 +246,18 @@ def parse_cvat_xml_file(self, xml_file): else: return None + paths = load_paths() creds = load_credentials() dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) -print('Done') + +saved_text_files_dir = paths['temp_annotation'] +with 
open(saved_text_files_dir + 'train.txt', 'w') as f: + for item in y_train: + f.write("%s\n" % item) + +with open(saved_text_files_dir + 'test.txt', 'w') as f: + for item in y_test: + f.write("%s\n" % item) \ No newline at end of file From 6df30f22e2310beeccd2b327158e6eb6f01a45fd Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 18:47:31 +0100 Subject: [PATCH 20/65] fixed typo in epochs --- conf/base/training_parameters.yml | 2 +- .../train_tensorflow_model.py | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 3215e89..fc07916 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,7 +1,7 @@ training: batch_size : 6 letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. - total_epoches : 100 + total_epochs : 100 train_evaluation_step : 100 # Evaluate on the training batch after some steps. val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. save_epoch : 10 # Save the model after some epochs. 
diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 61691e3..478a643 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -27,9 +27,9 @@ number_classes = len(classes) train_data_path = os.path.join(train_dir_path, 'training_data.txt') -val_data_path = os.path.join(train_dir_path, 'val_data.txt') +test_data_path = os.path.join(train_dir_path, 'test_data.txt') train_img_cnt = len(open(train_data_path, 'r').readlines()) -val_img_cnt = len(open(val_data_path, 'r').readlines()) +val_img_cnt = len(open(test_data_path, 'r').readlines()) train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) @@ -58,18 +58,18 @@ num_parallel_calls=train_params['num_threads']) train_dataset = train_dataset.prefetch(train_params['prefetech_buffer']) -val_dataset = tf.data.TextLineDataset(val_data_path) -val_dataset = val_dataset.batch(1) -val_dataset = val_dataset.map( +test_dataset = tf.data.TextLineDataset(test_data_path) +test_dataset = test_dataset.batch(1) +test_dataset = test_dataset.map( lambda x: tf.py_func(get_batch_data, inp=[x, number_classes, [416, 416], anchors, 'val', False, False, True], Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), num_parallel_calls=train_params['num_threads']) -val_dataset.prefetch(train_params['prefetech_buffer']) +test_dataset.prefetch(train_params['prefetech_buffer']) iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes) train_init_op = iterator.make_initializer(train_dataset) -val_init_op = iterator.make_initializer(val_dataset) +val_init_op = iterator.make_initializer(test_dataset) # get an element from the chosen dataset iterator image_ids, 
image, y_true_13, y_true_26, y_true_52 = iterator.get_next() @@ -148,7 +148,7 @@ best_mAP = -np.Inf - for epoch in range(train_params['total_epoches']): + for epoch in range(train_params['total_epochs']): sess.run(train_init_op) loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), \ @@ -215,7 +215,7 @@ # calc mAP rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() - gt_dict = parse_gt_rec(val_data_path, [416, 416], letterbox_resize=True) + gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) From d247bc338ab7478906fae0f335f704b686bf0bf0 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 18:49:16 +0100 Subject: [PATCH 21/65] added missing package --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9a1a2ed..a8d30f8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,4 +19,5 @@ awscli>=1.16.198, <2.0 sqlalchemy pandas==0.24.2 psycopg2 -seaborn>=0.9 \ No newline at end of file +seaborn>=0.9 +tqdm \ No newline at end of file From 23028eee93896f9c4aa63e72d0215e74d8e4a21d Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Sun, 11 Aug 2019 20:08:40 +0100 Subject: [PATCH 22/65] formatted txt truth file so it fits requisite format for trainng --- .../d04_modelling/transfer_learning/test.py | 0 .../train_tensorflow_model.py | 15 +++---- .../transfer_learning/training_data_loader.py | 39 +++++++++++++------ 3 files changed, 35 insertions(+), 19 deletions(-) create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/test.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test.py b/src/traffic_analysis/d04_modelling/transfer_learning/test.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 478a643..97dc894 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -14,20 +14,21 @@ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d00_utils.load_confs import load_parameters, load_paths, load_training_parameters -from traffic_analysis.d04_modelling.perform_detection_tensorflow import parse_anchors, read_class_names +from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names params = load_parameters() train_params = load_training_parameters() paths = load_paths() -train_dir_path = paths['training'] +truth_dir_path = paths['temp_annotation'] class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS classes = read_class_names(class_name_path) anchors = parse_anchors(paths) number_classes = len(classes) -train_data_path = os.path.join(train_dir_path, 'training_data.txt') -test_data_path = os.path.join(train_dir_path, 'test_data.txt') +train_data_path = os.path.join(truth_dir_path, 'train.txt') +test_data_path = os.path.join(truth_dir_path, 'test.txt') train_img_cnt = len(open(train_data_path, 'r').readlines()) val_img_cnt = len(open(test_data_path, 'r').readlines()) train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) @@ -37,7 +38,7 @@ if train_params['lr_type'] == 'piecewise': pw_boundaries = [float(i) * train_batch_num + train_params['global_step'] for i in train_params['pw_boundaries']] -logging_file_path = 
os.path.join(train_dir_path, 'progress.log') +logging_file_path = os.path.join(truth_dir_path, 'progress.log') logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', filename=logging_file_path, filemode='w') @@ -48,7 +49,7 @@ gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], train_params['score_threshold'], train_params['nms_threshold']) -train_dataset = tf.data.TextLineDataset(train_dir_path) +train_dataset = tf.data.TextLineDataset(train_data_path) train_dataset = train_dataset.shuffle(train_img_cnt) train_dataset = train_dataset.batch(train_params['batch_size']) train_dataset = train_dataset.map( @@ -136,7 +137,7 @@ saver_to_save = tf.train.Saver() saver_best = tf.train.Saver() -tensorboard_log_path = os.path.join(train_dir_path, 'tensorboard_logs') +tensorboard_log_path = os.path.join(truth_dir_path, 'tensorboard_logs') yolov3_tensorflow_path = os.path.join(paths['local_detection_model'], params['detection_model'], 'yolov3.ckpt') with tf.Session() as sess: sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index a1cea0c..46edb93 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -1,3 +1,4 @@ +import os import xml.etree.ElementTree as ET from PIL import Image import numpy as np @@ -7,6 +8,8 @@ from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir, mp4_to_npy from traffic_analysis.d02_ref.ref_utils import get_s3_video_path_from_xml_name +from traffic_analysis.d00_utils.get_project_directory import get_project_directory +from 
traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names class TransferDataset(Enum): @@ -77,17 +80,17 @@ def load_detrac_data(self): x = [] for labels in y: image_num = labels.split(' ')[0].zfill(5) - folder = labels.split(' ')[1] + impath = labels.split(' ')[1] + folder = impath.split('/')[-1][:9] + file_to_download = paths['s3_detrac_images'] + \ folder + '/' + \ 'img' + image_num + '.jpg' - print(file_to_download) + download_file_to = paths['temp_raw_images'] + \ folder + '_' + \ image_num + '.jpg' - print(download_file_to) - self.data_loader_s3.download_file( path_of_file_to_download=file_to_download, path_to_download_file_to=download_file_to) @@ -100,15 +103,20 @@ def load_detrac_data(self): def parse_detrac_xml_file(self, xml_file): - path = self.paths['temp_annotation'] + xml_file.split('/')[-1] + project_dir = get_project_directory() + image_dir = os.path.join(project_dir, self.paths['temp_raw_images']) + xml_path = self.paths['temp_annotation'] + xml_file.split('/')[-1] + + class_names_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') + classes = read_class_names(class_names_path) try: self.data_loader_s3.download_file(path_of_file_to_download=xml_file, - path_to_download_file_to=path) + path_to_download_file_to=xml_path) except: print("Could not download file " + xml_file) - root = ET.parse(path).getroot() + root = ET.parse(xml_path).getroot() results = [] # [image_index @@ -121,12 +129,12 @@ def parse_detrac_xml_file(self, xml_file): # x_max, # y_max] - im_path = path.split('/')[-1][:-4] - im_width = 250 - im_height = 250 + im_width = 960 + im_height = 540 for track in root.iter('frame'): - + frame_str = str(track.attrib['num']).zfill(5) + im_path = os.path.join(image_dir, xml_path[:-4] + '_' + frame_str + '.jpg') result = str(track.attrib['num']) + \ ' ' + str(im_path) + \ ' ' + str(im_width) + \ @@ -134,6 +142,12 @@ def parse_detrac_xml_file(self, xml_file): for frame_obj in 
track.iter('target'): vehicle_type = frame_obj.find('attribute').attrib['vehicle_type'] + if vehicle_type == 'van': + vehicle_type_idx = 2 # say vans are cars because we don't distinguish + else: + for tick in range(len(classes)): + if classes[tick] == vehicle_type: + vehicle_type_idx = tick left = float(frame_obj.find('box').attrib['left']) top = float(frame_obj.find('box').attrib['top']) @@ -145,13 +159,14 @@ def parse_detrac_xml_file(self, xml_file): x_max = left + width y_max = top + height - result += ' ' + str(vehicle_type) + \ + result += ' ' + str(vehicle_type_idx) + \ ' ' + str(x_min) + \ ' ' + str(y_min) + \ ' ' + str(x_max) + \ ' ' + str(y_max) results.append(result) + print(result) if len(results) > 1: return results From e1b71a0d4f814f15f7b6e276665b03f156175962 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 00:01:39 +0100 Subject: [PATCH 23/65] made changes to account for bug fixes --- conf/base/training_parameters.yml | 8 +- .../tensorflow_model_loader.py | 180 ++++++++++++++++++ .../tensorflow_training_utils.py | 39 ++-- .../train_tensorflow_model.py | 21 +- .../transfer_learning/training_data_loader.py | 5 +- 5 files changed, 221 insertions(+), 32 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index fc07916..e6c25e4 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -6,7 +6,7 @@ training: val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. save_epoch : 10 # Save the model after some epochs. batch_norm_decay : 0.99 # decay in bn ops - weight_decay : 5e-4 # l2 weight decay + weight_decay : 0.0005 # l2 weight decay global_step : 0 # used when resuming training warm_up_epoch : 3 # set to larger value if gradient explodes num_threads : 10 # Number of threads for image processing used in tf.data pipeline. 
@@ -15,13 +15,13 @@ training: learning: optimizer_name : 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] save_optimizer : True # Whether to save the optimizer parameters into the checkpoint file. - learning_rate_init : 1e-4 + learning_rate_init : 0.0001 lr_type : 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] lr_decay_epoch : 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. lr_decay_factor : 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. - lr_lower_bound : 1e-6 # The minimum learning rate. + lr_lower_bound : 0.000001 # The minimum learning rate. pw_boundaries : [30, 50] # epoch based boundaries - pw_values : [learning_rate_init, 3e-5, 1e-5] + pw_values : [0.0001, 0.00003, 0.00001] # FIRST VALUE MUST BE LEARNING_RATE_INIT validation: # nms diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py index df50136..b77466b 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_model_loader.py @@ -193,6 +193,186 @@ def _reshape(result): return boxes, confs, probs + def loss_layer(self, feature_map_i, y_true, anchors): + ''' + calc loss function from a certain scale + input: + feature_map_i: feature maps of a certain scale. shape: [N, 13, 13, 3*(5 + num_class)] etc. + y_true: y_ture from a certain scale. shape: [N, 13, 13, 3, 5 + num_class + 1] etc. + anchors: shape [9, 2] + ''' + + # size in [h, w] format! don't get messed up! 
+ grid_size = tf.shape(feature_map_i)[1:3] + # the downscale ratio in height and weight + ratio = tf.cast(self.img_size / grid_size, tf.float32) + # N: batch_size + N = tf.cast(tf.shape(feature_map_i)[0], tf.float32) + + x_y_offset, pred_boxes, pred_conf_logits, pred_prob_logits = self.reorg_layer(feature_map_i, anchors) + + ########### + # get mask + ########### + + # shape: take 416x416 input image and 13*13 feature_map for example: + # [N, 13, 13, 3, 1] + object_mask = y_true[..., 4:5] + + # the calculation of ignore mask if referred from + # https://github.com/pjreddie/darknet/blob/master/src/yolo_layer.c#L179 + ignore_mask = tf.TensorArray(tf.float32, size=0, dynamic_size=True) + + def loop_cond(idx, ignore_mask): + return tf.less(idx, tf.cast(N, tf.int32)) + + def loop_body(idx, ignore_mask): + # shape: [13, 13, 3, 4] & [13, 13, 3] ==> [V, 4] + # V: num of true gt box of each image in a batch + valid_true_boxes = tf.boolean_mask(y_true[idx, ..., 0:4], tf.cast(object_mask[idx, ..., 0], 'bool')) + # shape: [13, 13, 3, 4] & [V, 4] ==> [13, 13, 3, V] + iou = self.box_iou(pred_boxes[idx], valid_true_boxes) + # shape: [13, 13, 3] + best_iou = tf.reduce_max(iou, axis=-1) + # shape: [13, 13, 3] + ignore_mask_tmp = tf.cast(best_iou < 0.5, tf.float32) + # finally will be shape: [N, 13, 13, 3] + ignore_mask = ignore_mask.write(idx, ignore_mask_tmp) + return idx + 1, ignore_mask + + _, ignore_mask = tf.while_loop(cond=loop_cond, body=loop_body, loop_vars=[0, ignore_mask]) + ignore_mask = ignore_mask.stack() + # shape: [N, 13, 13, 3, 1] + ignore_mask = tf.expand_dims(ignore_mask, -1) + + # shape: [N, 13, 13, 3, 2] + pred_box_xy = pred_boxes[..., 0:2] + pred_box_wh = pred_boxes[..., 2:4] + + # get xy coordinates in one cell from the feature_map + # numerical range: 0 ~ 1 + # shape: [N, 13, 13, 3, 2] + true_xy = y_true[..., 0:2] / ratio[::-1] - x_y_offset + pred_xy = pred_box_xy / ratio[::-1] - x_y_offset + + # get_tw_th + # numerical range: 0 ~ 1 + # shape: [N, 13, 13, 3, 
2] + true_tw_th = y_true[..., 2:4] / anchors + pred_tw_th = pred_box_wh / anchors + # for numerical stability + true_tw_th = tf.where(condition=tf.equal(true_tw_th, 0), + x=tf.ones_like(true_tw_th), y=true_tw_th) + pred_tw_th = tf.where(condition=tf.equal(pred_tw_th, 0), + x=tf.ones_like(pred_tw_th), y=pred_tw_th) + true_tw_th = tf.log(tf.clip_by_value(true_tw_th, 1e-9, 1e9)) + pred_tw_th = tf.log(tf.clip_by_value(pred_tw_th, 1e-9, 1e9)) + + # box size punishment: + # box with smaller area has bigger weight. This is taken from the yolo darknet C source code. + # shape: [N, 13, 13, 3, 1] + box_loss_scale = 2. - (y_true[..., 2:3] / tf.cast(self.img_size[1], tf.float32)) * ( + y_true[..., 3:4] / tf.cast(self.img_size[0], tf.float32)) + + ############ + # loss_part + ############ + # mix_up weight + # [N, 13, 13, 3, 1] + mix_w = y_true[..., -1:] + # shape: [N, 13, 13, 3, 1] + xy_loss = tf.reduce_sum(tf.square(true_xy - pred_xy) * object_mask * box_loss_scale * mix_w) / N + wh_loss = tf.reduce_sum(tf.square(true_tw_th - pred_tw_th) * object_mask * box_loss_scale * mix_w) / N + + # shape: [N, 13, 13, 3, 1] + conf_pos_mask = object_mask + conf_neg_mask = (1 - object_mask) * ignore_mask + conf_loss_pos = conf_pos_mask * tf.nn.sigmoid_cross_entropy_with_logits(labels=object_mask, + logits=pred_conf_logits) + conf_loss_neg = conf_neg_mask * tf.nn.sigmoid_cross_entropy_with_logits(labels=object_mask, + logits=pred_conf_logits) + # TODO: may need to balance the pos-neg by multiplying some weights + conf_loss = conf_loss_pos + conf_loss_neg + if self.use_focal_loss: + alpha = 1.0 + gamma = 2.0 + # TODO: alpha should be a mask array if needed + focal_mask = alpha * tf.pow(tf.abs(object_mask - tf.sigmoid(pred_conf_logits)), gamma) + conf_loss *= focal_mask + conf_loss = tf.reduce_sum(conf_loss * mix_w) / N + + # shape: [N, 13, 13, 3, 1] + # whether to use label smooth + if self.use_label_smooth: + delta = 0.01 + label_target = (1 - delta) * y_true[..., 5:-1] + delta * 1. 
/ self.class_num + else: + label_target = y_true[..., 5:-1] + class_loss = object_mask * tf.nn.sigmoid_cross_entropy_with_logits(labels=label_target, + logits=pred_prob_logits) * mix_w + class_loss = tf.reduce_sum(class_loss) / N + + return xy_loss, wh_loss, conf_loss, class_loss + + def box_iou(self, pred_boxes, valid_true_boxes): + ''' + param: + pred_boxes: [13, 13, 3, 4], (center_x, center_y, w, h) + valid_true: [V, 4] + ''' + + # [13, 13, 3, 2] + pred_box_xy = pred_boxes[..., 0:2] + pred_box_wh = pred_boxes[..., 2:4] + + # shape: [13, 13, 3, 1, 2] + pred_box_xy = tf.expand_dims(pred_box_xy, -2) + pred_box_wh = tf.expand_dims(pred_box_wh, -2) + + # [V, 2] + true_box_xy = valid_true_boxes[:, 0:2] + true_box_wh = valid_true_boxes[:, 2:4] + + # [13, 13, 3, 1, 2] & [V, 2] ==> [13, 13, 3, V, 2] + intersect_mins = tf.maximum(pred_box_xy - pred_box_wh / 2., + true_box_xy - true_box_wh / 2.) + intersect_maxs = tf.minimum(pred_box_xy + pred_box_wh / 2., + true_box_xy + true_box_wh / 2.) + intersect_wh = tf.maximum(intersect_maxs - intersect_mins, 0.) + + # shape: [13, 13, 3, V] + intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1] + # shape: [13, 13, 3, 1] + pred_box_area = pred_box_wh[..., 0] * pred_box_wh[..., 1] + # shape: [V] + true_box_area = true_box_wh[..., 0] * true_box_wh[..., 1] + # shape: [1, V] + true_box_area = tf.expand_dims(true_box_area, axis=0) + + # [13, 13, 3, V] + iou = intersect_area / (pred_box_area + true_box_area - intersect_area + 1e-10) + + return iou + + def compute_loss(self, y_pred, y_true): + ''' + param: + y_pred: returned feature_map list by `forward` function: [feature_map_1, feature_map_2, feature_map_3] + y_true: input y_true by the tf.data pipeline + ''' + loss_xy, loss_wh, loss_conf, loss_class = 0., 0., 0., 0. 
+ anchor_group = [self.anchors[6:9], self.anchors[3:6], self.anchors[0:3]] + + # calc loss in 3 scales + for i in range(len(y_pred)): + result = self.loss_layer(y_pred[i], y_true[i], anchor_group[i]) + loss_xy += result[0] + loss_wh += result[1] + loss_conf += result[2] + loss_class += result[3] + total_loss = loss_xy + loss_wh + loss_conf + loss_class + return [total_loss, loss_xy, loss_wh, loss_conf, loss_class] + def conv2d(inputs, filters, kernel_size, strides=1): def _fixed_padding(inputs, kernel_size): diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index f3ac843..da0cef0 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -8,6 +8,8 @@ from tensorflow.core.framework import summary_pb2 from collections import Counter +from traffic_analysis.d00_utils.load_confs import load_training_parameters + PY_VERSION = sys.version_info[0] iter_cnt = 0 @@ -600,23 +602,26 @@ def make_summary(name, val): return summary_pb2.Summary(value=[summary_pb2.Summary.Value(tag=name, simple_value=val)]) -def config_learning_rate(args, global_step): - if args.lr_type == 'exponential': - lr_tmp = tf.train.exponential_decay(args.learning_rate_init, global_step, args.lr_decay_freq, - args.lr_decay_factor, staircase=True, name='exponential_learning_rate') - return tf.maximum(lr_tmp, args.lr_lower_bound) - elif args.lr_type == 'cosine_decay': - train_steps = (args.total_epoches - float(args.use_warm_up) * args.warm_up_epoch) * args.train_batch_num - return args.lr_lower_bound + 0.5 * (args.learning_rate_init - args.lr_lower_bound) * \ - (1 + tf.cos(global_step / train_steps * np.pi)) - elif args.lr_type == 'cosine_decay_restart': - return tf.train.cosine_decay_restarts(args.learning_rate_init, global_step, - args.lr_decay_freq, 
t_mul=2.0, m_mul=1.0, - name='cosine_decay_learning_rate_restart') - elif args.lr_type == 'fixed': - return tf.convert_to_tensor(args.learning_rate_init, name='fixed_learning_rate') - elif args.lr_type == 'piecewise': - return tf.train.piecewise_constant(global_step, boundaries=args.pw_boundaries, values=args.pw_values, +def config_learning_rate(lr_decay_freq, global_step): + train_params = load_training_parameters() + if train_params['lr_type'] == 'exponential': + lr_tmp = tf.train.exponential_decay(train_params['learning_rate_init'], global_step, lr_decay_freq, + train_params['lr_decay_factor'], staircase=True, + name='exponential_learning_rate') + return tf.maximum(lr_tmp, train_params['lr_lower_bound']) + # elif train_params['lr_type'] == 'cosine_decay': + # train_steps = (train_params['total_epoches'] - float(args.use_warm_up) * args.warm_up_epoch) * args.train_batch_num + # return args.lr_lower_bound + 0.5 * (args.learning_rate_init - args.lr_lower_bound) * \ + # (1 + tf.cos(global_step / train_steps * np.pi)) + # elif args.lr_type == 'cosine_decay_restart': + # return tf.train.cosine_decay_restarts(args.learning_rate_init, global_step, + # args.lr_decay_freq, t_mul=2.0, m_mul=1.0, + # name='cosine_decay_learning_rate_restart') + # elif args.lr_type == 'fixed': + # return tf.convert_to_tensor(args.learning_rate_init, name='fixed_learning_rate') + elif train_params['lr_type'] == 'piecewise': + return tf.train.piecewise_constant(global_step, boundaries=[float(i) for i in train_params['pw_boundaries']], + values=train_params['pw_values'], name='piecewise_learning_rate') else: raise ValueError('Unsupported learning rate type!') diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 97dc894..2c0f9c8 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ 
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -87,7 +87,7 @@ batch_norm_decay=train_params['batch_norm_decay'], weight_decay=train_params['weight_decay'], use_static_shape=False) -with tf.variable_scope('yolov3'): +with tf.variable_scope('YoloV3'): pred_feature_maps = yolo_model.forward(image, is_training=is_training) loss = yolo_model.compute_loss(pred_feature_maps, y_true) y_pred = yolo_model.predict(pred_feature_maps) @@ -95,8 +95,11 @@ l2_loss = tf.losses.get_regularization_loss() # setting restore parts and vars to update -saver_to_restore = tf.train.Saver(var_list=tf.contrib.framework.get_variables_to_restore(include=None, exclude=None)) -update_vars = tf.contrib.framework.get_variables_to_restore(include=['yolov3/yolov3_head']) +saver_to_restore = tf.train.Saver( + var_list=tf.contrib.framework.get_variables_to_restore( + include=None, + exclude=['YoloV3/yolov3_head/Conv_14', 'YoloV3/yolov3_head/Conv_6', 'YoloV3/yolov3_head/Conv_22'])) +update_vars = tf.contrib.framework.get_variables_to_restore(include=['YoloV3/yolov3_head']) tf.summary.scalar('train_batch_statistics/total_loss', loss[0]) tf.summary.scalar('train_batch_statistics/loss_xy', loss[1]) @@ -109,11 +112,11 @@ global_step = tf.Variable(float(train_params['global_step']), trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) -learning_rate = tf.cond(tf.less(global_step, train_params['train_batch_num'] * train_params['warm_up_epoch']), +learning_rate = tf.cond(tf.less(global_step, train_batch_num * train_params['warm_up_epoch']), lambda: train_params['learning_rate_init'] * - global_step / (train_params['train_batch_num'] * train_params['warm_up_epoch']), - lambda: config_learning_rate(global_step - - train_params['train_batch_num'] * train_params['warm_up_epoch'])) + global_step / (train_batch_num * train_params['warm_up_epoch']), + lambda: config_learning_rate(lr_decay_freq=lr_decay_freq, global_step=global_step - + train_batch_num * 
train_params['warm_up_epoch'])) tf.summary.scalar('learning_rate', learning_rate) if not train_params['save_optimizer']: @@ -155,7 +158,7 @@ loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), \ AverageMeter(), AverageMeter() - for i in trange(train_params['train_batch_num']): + for i in trange(train_batch_num): _, summary, __y_pred, __y_true, __loss, __global_step, __lr = sess.run( [train_op, merged, y_pred, y_true, loss, global_step, learning_rate], feed_dict={is_training: True}) @@ -238,7 +241,7 @@ if mAP > best_mAP: best_mAP = mAP saver_best.save(sess, os.path.join(train_params['trained_model_name'], - 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr)))_ + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 46edb93..811ee35 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -105,6 +105,8 @@ def parse_detrac_xml_file(self, xml_file): project_dir = get_project_directory() image_dir = os.path.join(project_dir, self.paths['temp_raw_images']) + + xml_file_name = xml_file.split('/')[-1] xml_path = self.paths['temp_annotation'] + xml_file.split('/')[-1] class_names_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') @@ -134,7 +136,7 @@ def parse_detrac_xml_file(self, xml_file): for track in root.iter('frame'): frame_str = str(track.attrib['num']).zfill(5) - 
im_path = os.path.join(image_dir, xml_path[:-4] + '_' + frame_str + '.jpg') + im_path = os.path.join(image_dir, xml_file_name[:-4] + '_' + frame_str + '.jpg') result = str(track.attrib['num']) + \ ' ' + str(im_path) + \ ' ' + str(im_width) + \ @@ -166,7 +168,6 @@ def parse_detrac_xml_file(self, xml_file): ' ' + str(y_max) results.append(result) - print(result) if len(results) > 1: return results From b52a90a0027b8586593cbf944504228bb4edf373 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 00:02:14 +0100 Subject: [PATCH 24/65] legacy scripts kept in for the time being --- .../d04_modelling/transfer_learning/args.py | 88 +++++++++++++++++++ .../d04_modelling/transfer_learning/test.py | 4 + 2 files changed, 92 insertions(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/args.py b/src/traffic_analysis/d04_modelling/transfer_learning/args.py index e69de29..f7d9cd9 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/args.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/args.py @@ -0,0 +1,88 @@ +# coding: utf-8 +# This file contains the parameter used in train_tensorflow_model.py + +from __future__ import division, print_function + +from utils.misc_utils import parse_anchors, read_class_names +import math + +### Some paths +train_file = './data/my_data/train.txt' # The path of the training txt file. +val_file = './data/my_data/val.txt' # The path of the validation txt file. +restore_path = './data/darknet_weights/yolov3.ckpt' # The path of the weights to restore. +save_dir = './checkpoint/' # The directory of the weights to save. +log_dir = './data/logs/' # The directory to store the tensorboard log files. +progress_log_path = './data/progress.log' # The path to record the training progress. +anchor_path = './data/yolo_anchors.txt' # The path of the anchor txt file. +class_name_path = './data/coco.names' # The path of the class names. 
+ +### Training releated numbers +batch_size = 6 +img_size = [416, 416] # Images will be resized to `img_size` and fed to the network, size format: [width, height] +letterbox_resize = True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. +total_epoches = 100 +train_evaluation_step = 100 # Evaluate on the training batch after some steps. +val_evaluation_epoch = 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. +save_epoch = 10 # Save the model after some epochs. +batch_norm_decay = 0.99 # decay in bn ops +weight_decay = 5e-4 # l2 weight decay +global_step = 0 # used when resuming training + +### tf.data parameters +num_threads = 10 # Number of threads for image processing used in tf.data pipeline. +prefetech_buffer = 5 # Prefetech_buffer used in tf.data pipeline. + +### Learning rate and optimizer +optimizer_name = 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] +save_optimizer = True # Whether to save the optimizer parameters into the checkpoint file. +learning_rate_init = 1e-4 +lr_type = 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] +lr_decay_epoch = 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. +lr_decay_factor = 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. +lr_lower_bound = 1e-6 # The minimum learning rate. +# only used in piecewise lr type +pw_boundaries = [30, 50] # epoch based boundaries +pw_values = [learning_rate_init, 3e-5, 1e-5] + +### Load and finetune +# Choose the parts you want to restore the weights. List form. 
+# restore_include: None, restore_exclude: None => restore the whole model +# restore_include: None, restore_exclude: scope => restore the whole model except `scope` +# restore_include: scope1, restore_exclude: scope2 => if scope1 contains scope2, restore scope1 and not restore scope2 (scope1 - scope2) +# choise 1: only restore the darknet body +# restore_include = ['yolov3/darknet53_body'] +# restore_exclude = None +# choise 2: restore all layers except the last 3 conv2d layers in 3 scale +restore_include = None +restore_exclude = ['yolov3/yolov3_head/Conv_14', 'yolov3/yolov3_head/Conv_6', 'yolov3/yolov3_head/Conv_22'] +# Choose the parts you want to finetune. List form. +# Set to None to train the whole model. +update_part = ['yolov3/yolov3_head'] + +### other training strategies +multi_scale_train = True # Whether to apply multi-scale training strategy. Image size varies from [320, 320] to [640, 640] by default. +use_label_smooth = True # Whether to use class label smoothing strategy. +use_focal_loss = True # Whether to apply focal loss on the conf loss. +use_mix_up = True # Whether to use mix up data augmentation strategy. +use_warm_up = True # whether to use warm up strategy to prevent from gradient exploding. +warm_up_epoch = 3 # Warm up training epoches. Set to a larger value if gradient explodes. + +### some constants in validation +# nms +nms_threshold = 0.45 # iou threshold in nms operation +score_threshold = 0.01 # threshold of the probability of the classes in nms operation, i.e. score = pred_confs * pred_probs. set lower for higher recall. +nms_topk = 150 # keep at most nms_topk outputs after nms +# mAP eval +eval_threshold = 0.5 # the iou threshold applied in mAP evaluation +use_voc_07_metric = False # whether to use voc 2007 evaluation metric, i.e. 
the 11-point metric + +### parse some params +anchors = parse_anchors(anchor_path) +classes = read_class_names(class_name_path) +class_num = len(classes) +train_img_cnt = len(open(train_file, 'r').readlines()) +val_img_cnt = len(open(val_file, 'r').readlines()) +train_batch_num = int(math.ceil(float(train_img_cnt) / batch_size)) + +lr_decay_freq = int(train_batch_num * lr_decay_epoch) +pw_boundaries = [float(i) * train_batch_num + global_step for i in pw_boundaries] \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test.py b/src/traffic_analysis/d04_modelling/transfer_learning/test.py index e69de29..7f7790a 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/test.py @@ -0,0 +1,4 @@ +from traffic_analysis.d00_utils.get_project_directory import get_project_directory + +print(get_project_directory()) +print(str(1).zfill(5)) \ No newline at end of file From 260deb03a151bd3a51b23564f897cd8ca0e19d8b Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 10:09:43 +0100 Subject: [PATCH 25/65] added paths so that sys is updated --- .../d04_modelling/transfer_learning/train_tensorflow_model.py | 4 ++++ .../d04_modelling/transfer_learning/training_data_loader.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 2c0f9c8..ab33d7c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -2,6 +2,7 @@ from __future__ import division, print_function import os +import sys import math import tensorflow as tf import numpy as np @@ -9,6 +10,9 @@ from tqdm import trange import random +ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') 
+sys.path.append(ospath) + from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 811ee35..6f9d13c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -1,9 +1,13 @@ import os +import sys import xml.etree.ElementTree as ET from PIL import Image import numpy as np from enum import Enum +ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') +sys.path.append(ospath) + from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir, mp4_to_npy From 9dae6206244f313c8715ca78648d78b1c49ef0aa Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 11:16:41 +0100 Subject: [PATCH 26/65] changed dir --- .../d04_modelling/transfer_learning/train_tensorflow_model.py | 2 +- .../d04_modelling/transfer_learning/training_data_loader.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index ab33d7c..90a3f61 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -10,7 +10,7 @@ from tqdm import trange import random -ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') +ospath = 
os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../..') sys.path.append(ospath) from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 6f9d13c..02f81fd 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -5,7 +5,7 @@ import numpy as np from enum import Enum -ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') +ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../..') sys.path.append(ospath) from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials From 8c7ca7a17872a23176e06517af0e8bc5c5792fe1 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 11:59:18 +0100 Subject: [PATCH 27/65] new transfer_learning pipeline script --- src/run_transfer_learning.py | 28 +++++++++++++++++++ .../train_tensorflow_model.py | 3 -- 2 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 src/run_transfer_learning.py diff --git a/src/run_transfer_learning.py b/src/run_transfer_learning.py new file mode 100644 index 0000000..85af946 --- /dev/null +++ b/src/run_transfer_learning.py @@ -0,0 +1,28 @@ +from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials, \ + load_parameters, load_training_parameters +from traffic_analysis.d04_modelling.transfer_learning.training_data_loader import DataLoader, TransferDataset +from traffic_analysis.d04_modelling.transfer_learning.train_tensorflow_model import transfer_learn + +paths = load_paths() +creds = load_credentials() +params = load_parameters() +train_params = load_training_parameters() + +dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) +x_train, 
y_train, x_test, y_test = dl.get_train_and_test(.8) + +print('---- parsing xml files and downloading to temp ----') +saved_text_files_dir = paths['temp_annotation'] +with open(saved_text_files_dir + 'train.txt', 'w') as f: + for item in y_train: + f.write("%s\n" % item) + +with open(saved_text_files_dir + 'test.txt', 'w') as f: + for item in y_test: + f.write("%s\n" % item) + +transfer_learn(paths=paths, + params=params, + train_params=train_params, + train_file='train.txt', + test_file='test.txt') \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 90a3f61..1168e5c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -10,9 +10,6 @@ from tqdm import trange import random -ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../..') -sys.path.append(ospath) - from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms From 97aba353386bc7b77789fea35492d1c8d36242dc Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 11:59:41 +0100 Subject: [PATCH 28/65] fixed a quick bug with the configuration of the learning rate --- .../transfer_learning/tensorflow_training_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index da0cef0..9b41dfd 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ 
b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -602,7 +602,7 @@ def make_summary(name, val): return summary_pb2.Summary(value=[summary_pb2.Summary.Value(tag=name, simple_value=val)]) -def config_learning_rate(lr_decay_freq, global_step): +def config_learning_rate(lr_decay_freq, train_batch_num, global_step): train_params = load_training_parameters() if train_params['lr_type'] == 'exponential': lr_tmp = tf.train.exponential_decay(train_params['learning_rate_init'], global_step, lr_decay_freq, @@ -620,6 +620,8 @@ def config_learning_rate(lr_decay_freq, global_step): # elif args.lr_type == 'fixed': # return tf.convert_to_tensor(args.learning_rate_init, name='fixed_learning_rate') elif train_params['lr_type'] == 'piecewise': + train_params['pw_boundaries'] = [float(i) * train_batch_num + + train_params['global_step'] for i in train_params['pw_boundaries']] return tf.train.piecewise_constant(global_step, boundaries=[float(i) for i in train_params['pw_boundaries']], values=train_params['pw_values'], name='piecewise_learning_rate') From aecd924816ba904f47993ae34060c748c64ebe2d Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 12:00:15 +0100 Subject: [PATCH 29/65] fixed functions so they work with the transfer learning pipeline code --- .../train_tensorflow_model.py | 465 +++++++++--------- .../transfer_learning/training_data_loader.py | 34 +- 2 files changed, 241 insertions(+), 258 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 1168e5c..a4ac642 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -2,7 +2,6 @@ from __future__ import division, print_function import os -import sys import math import tensorflow as tf import numpy as np @@ -14,241 
+13,241 @@ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 -from traffic_analysis.d00_utils.load_confs import load_parameters, load_paths, load_training_parameters from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names -params = load_parameters() -train_params = load_training_parameters() -paths = load_paths() -truth_dir_path = paths['temp_annotation'] -class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS -classes = read_class_names(class_name_path) -anchors = parse_anchors(paths) -number_classes = len(classes) - -train_data_path = os.path.join(truth_dir_path, 'train.txt') -test_data_path = os.path.join(truth_dir_path, 'test.txt') -train_img_cnt = len(open(train_data_path, 'r').readlines()) -val_img_cnt = len(open(test_data_path, 'r').readlines()) -train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) - -lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) - -if train_params['lr_type'] == 'piecewise': - pw_boundaries = [float(i) * train_batch_num + train_params['global_step'] for i in train_params['pw_boundaries']] - -logging_file_path = os.path.join(truth_dir_path, 'progress.log') -logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', - datefmt='%a, %d %b %Y %H:%M:%S', filename=logging_file_path, filemode='w') - -is_training = tf.placeholder(tf.bool, name="phase_train") -handle_flag = tf.placeholder(tf.string, [], name='iterator_handle_flag') -pred_boxes_flag = tf.placeholder(tf.float32, [1, None, None]) -pred_scores_flag = tf.placeholder(tf.float32, [1, None, None]) -gpu_nms_op = 
gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], - train_params['score_threshold'], train_params['nms_threshold']) - -train_dataset = tf.data.TextLineDataset(train_data_path) -train_dataset = train_dataset.shuffle(train_img_cnt) -train_dataset = train_dataset.batch(train_params['batch_size']) -train_dataset = train_dataset.map( - lambda x: tf.py_func(get_batch_data, - inp=[x, number_classes, [416, 416], anchors, 'train', True, True, True], - Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), - num_parallel_calls=train_params['num_threads']) -train_dataset = train_dataset.prefetch(train_params['prefetech_buffer']) - -test_dataset = tf.data.TextLineDataset(test_data_path) -test_dataset = test_dataset.batch(1) -test_dataset = test_dataset.map( - lambda x: tf.py_func(get_batch_data, - inp=[x, number_classes, [416, 416], anchors, 'val', False, False, True], - Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), - num_parallel_calls=train_params['num_threads']) -test_dataset.prefetch(train_params['prefetech_buffer']) - -iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes) -train_init_op = iterator.make_initializer(train_dataset) -val_init_op = iterator.make_initializer(test_dataset) - -# get an element from the chosen dataset iterator -image_ids, image, y_true_13, y_true_26, y_true_52 = iterator.get_next() -y_true = [y_true_13, y_true_26, y_true_52] - -# tf.data pipeline will lose the data `static` shape, so we need to set it manually -image_ids.set_shape([None]) -image.set_shape([None, None, None, 3]) -for y in y_true: - y.set_shape([None, None, None, None, None]) - -# define model -yolo_model = YoloV3(number_classes, anchors, use_label_smooth=True, use_focal_loss=True, - batch_norm_decay=train_params['batch_norm_decay'], weight_decay=train_params['weight_decay'], - use_static_shape=False) - -with tf.variable_scope('YoloV3'): - pred_feature_maps = 
yolo_model.forward(image, is_training=is_training) -loss = yolo_model.compute_loss(pred_feature_maps, y_true) -y_pred = yolo_model.predict(pred_feature_maps) - -l2_loss = tf.losses.get_regularization_loss() - -# setting restore parts and vars to update -saver_to_restore = tf.train.Saver( - var_list=tf.contrib.framework.get_variables_to_restore( - include=None, - exclude=['YoloV3/yolov3_head/Conv_14', 'YoloV3/yolov3_head/Conv_6', 'YoloV3/yolov3_head/Conv_22'])) -update_vars = tf.contrib.framework.get_variables_to_restore(include=['YoloV3/yolov3_head']) - -tf.summary.scalar('train_batch_statistics/total_loss', loss[0]) -tf.summary.scalar('train_batch_statistics/loss_xy', loss[1]) -tf.summary.scalar('train_batch_statistics/loss_wh', loss[2]) -tf.summary.scalar('train_batch_statistics/loss_conf', loss[3]) -tf.summary.scalar('train_batch_statistics/loss_class', loss[4]) -tf.summary.scalar('train_batch_statistics/loss_l2', l2_loss) -tf.summary.scalar('train_batch_statistics/loss_ratio', l2_loss / loss[0]) - -global_step = tf.Variable(float(train_params['global_step']), - trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) - -learning_rate = tf.cond(tf.less(global_step, train_batch_num * train_params['warm_up_epoch']), - lambda: train_params['learning_rate_init'] * - global_step / (train_batch_num * train_params['warm_up_epoch']), - lambda: config_learning_rate(lr_decay_freq=lr_decay_freq, global_step=global_step - - train_batch_num * train_params['warm_up_epoch'])) -tf.summary.scalar('learning_rate', learning_rate) - -if not train_params['save_optimizer']: - saver_to_save = tf.train.Saver() - saver_best = tf.train.Saver() - -optimizer = config_optimizer(train_params['optimizer_name'], learning_rate) - -# set dependencies for BN ops -update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) -with tf.control_dependencies(update_ops): - # train_op = optimizer.minimize(loss[0] + l2_loss, var_list=update_vars, global_step=global_step) - # apply gradient clip to avoid 
gradient exploding - gvs = optimizer.compute_gradients(loss[0] + l2_loss, var_list=update_vars) - clip_grad_var = [gv if gv[0] is None else [ - tf.clip_by_norm(gv[0], 100.), gv[1]] for gv in gvs] - train_op = optimizer.apply_gradients(clip_grad_var, global_step=global_step) - -if train_params['save_optimizer']: - print('Saving optimizer parameters to checkpoint! Remember to restore the global_step in the fine-tuning afterwards.') - saver_to_save = tf.train.Saver() - saver_best = tf.train.Saver() - -tensorboard_log_path = os.path.join(truth_dir_path, 'tensorboard_logs') -yolov3_tensorflow_path = os.path.join(paths['local_detection_model'], params['detection_model'], 'yolov3.ckpt') -with tf.Session() as sess: - sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) - saver_to_restore.restore(sess, yolov3_tensorflow_path) - merged = tf.summary.merge_all() - writer = tf.summary.FileWriter(tensorboard_log_path, sess.graph) - - print('\n----------- start to train -----------\n') - - best_mAP = -np.Inf - - for epoch in range(train_params['total_epochs']): - - sess.run(train_init_op) - loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), \ - AverageMeter(), AverageMeter() - - for i in trange(train_batch_num): - _, summary, __y_pred, __y_true, __loss, __global_step, __lr = sess.run( - [train_op, merged, y_pred, y_true, loss, global_step, learning_rate], - feed_dict={is_training: True}) - - writer.add_summary(summary, global_step=__global_step) - - loss_total.update(__loss[0], len(__y_pred[0])) - loss_xy.update(__loss[1], len(__y_pred[0])) - loss_wh.update(__loss[2], len(__y_pred[0])) - loss_conf.update(__loss[3], len(__y_pred[0])) - loss_class.update(__loss[4], len(__y_pred[0])) - - if __global_step % train_params['train_evaluation_step'] == 0 and __global_step > 0: - # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.number_classes, args.nms_topk, args.score_threshold, args.nms_threshold) 
- recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, - __y_pred, __y_true, number_classes, train_params['nms_threshold']) - - info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, " \ - "wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( - epoch, int(__global_step), loss_total.average, loss_xy.average, loss_wh.average, loss_conf.average, loss_class.average) - info += 'Last batch: rec: {:.3f}, prec: {:.3f} | lr: {:.5g}'.format(recall, precision, __lr) +def transfer_learn(paths, params, train_params, train_file, test_file): + """ trains last three layers of yolov3 network on custom dataset + """ + + truth_dir_path = paths['temp_annotation'] + class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS + classes = read_class_names(class_name_path) + anchors = parse_anchors(paths) + number_classes = len(classes) + + train_data_path = os.path.join(truth_dir_path, train_file) + test_data_path = os.path.join(truth_dir_path, test_file) + train_img_cnt = len(open(train_data_path, 'r').readlines()) + val_img_cnt = len(open(test_data_path, 'r').readlines()) + train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) + + lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) + + logging_file_path = os.path.join(truth_dir_path, 'progress.log') + logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', + datefmt='%a, %d %b %Y %H:%M:%S', filename=logging_file_path, filemode='w') + + is_training = tf.placeholder(tf.bool, name="phase_train") + handle_flag = tf.placeholder(tf.string, [], name='iterator_handle_flag') + pred_boxes_flag = tf.placeholder(tf.float32, [1, None, None]) + pred_scores_flag = tf.placeholder(tf.float32, [1, None, None]) + gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], + train_params['score_threshold'], train_params['nms_threshold']) + + 
train_dataset = tf.data.TextLineDataset(train_data_path) + train_dataset = train_dataset.shuffle(train_img_cnt) + train_dataset = train_dataset.batch(train_params['batch_size']) + train_dataset = train_dataset.map( + lambda x: tf.py_func(get_batch_data, + inp=[x, number_classes, [416, 416], anchors, 'train', True, True, True], + Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), + num_parallel_calls=train_params['num_threads']) + train_dataset = train_dataset.prefetch(train_params['prefetech_buffer']) + + test_dataset = tf.data.TextLineDataset(test_data_path) + test_dataset = test_dataset.batch(1) + test_dataset = test_dataset.map( + lambda x: tf.py_func(get_batch_data, + inp=[x, number_classes, [416, 416], anchors, 'val', False, False, True], + Tout=[tf.int64, tf.float32, tf.float32, tf.float32, tf.float32]), + num_parallel_calls=train_params['num_threads']) + test_dataset.prefetch(train_params['prefetech_buffer']) + + iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes) + train_init_op = iterator.make_initializer(train_dataset) + val_init_op = iterator.make_initializer(test_dataset) + + # get an element from the chosen dataset iterator + image_ids, image, y_true_13, y_true_26, y_true_52 = iterator.get_next() + y_true = [y_true_13, y_true_26, y_true_52] + + # tf.data pipeline will lose the data `static` shape, so we need to set it manually + image_ids.set_shape([None]) + image.set_shape([None, None, None, 3]) + for y in y_true: + y.set_shape([None, None, None, None, None]) + + # define model + yolo_model = YoloV3(number_classes, anchors, use_label_smooth=True, use_focal_loss=True, + batch_norm_decay=train_params['batch_norm_decay'], weight_decay=train_params['weight_decay'], + use_static_shape=False) + + with tf.variable_scope('YoloV3'): + pred_feature_maps = yolo_model.forward(image, is_training=is_training) + loss = yolo_model.compute_loss(pred_feature_maps, y_true) + y_pred = 
yolo_model.predict(pred_feature_maps) + + l2_loss = tf.losses.get_regularization_loss() + + # setting restore parts and vars to update + saver_to_restore = tf.train.Saver( + var_list=tf.contrib.framework.get_variables_to_restore( + include=None, + exclude=['YoloV3/yolov3_head/Conv_14', 'YoloV3/yolov3_head/Conv_6', 'YoloV3/yolov3_head/Conv_22'])) + update_vars = tf.contrib.framework.get_variables_to_restore(include=['YoloV3/yolov3_head']) + + tf.summary.scalar('train_batch_statistics/total_loss', loss[0]) + tf.summary.scalar('train_batch_statistics/loss_xy', loss[1]) + tf.summary.scalar('train_batch_statistics/loss_wh', loss[2]) + tf.summary.scalar('train_batch_statistics/loss_conf', loss[3]) + tf.summary.scalar('train_batch_statistics/loss_class', loss[4]) + tf.summary.scalar('train_batch_statistics/loss_l2', l2_loss) + tf.summary.scalar('train_batch_statistics/loss_ratio', l2_loss / loss[0]) + + global_step = tf.Variable(float(train_params['global_step']), + trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES]) + + learning_rate = tf.cond(tf.less(global_step, train_batch_num * train_params['warm_up_epoch']), + lambda: train_params['learning_rate_init'] * + global_step / (train_batch_num * train_params['warm_up_epoch']), + lambda: config_learning_rate(lr_decay_freq=lr_decay_freq, train_batch_num=train_batch_num, + global_step=global_step - + train_batch_num * train_params['warm_up_epoch'])) + tf.summary.scalar('learning_rate', learning_rate) + + if not train_params['save_optimizer']: + saver_to_save = tf.train.Saver() + saver_best = tf.train.Saver() + + optimizer = config_optimizer(train_params['optimizer_name'], learning_rate) + + # set dependencies for BN ops + update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) + with tf.control_dependencies(update_ops): + # train_op = optimizer.minimize(loss[0] + l2_loss, var_list=update_vars, global_step=global_step) + # apply gradient clip to avoid gradient exploding + gvs = optimizer.compute_gradients(loss[0] + 
l2_loss, var_list=update_vars) + clip_grad_var = [gv if gv[0] is None else [ + tf.clip_by_norm(gv[0], 100.), gv[1]] for gv in gvs] + train_op = optimizer.apply_gradients(clip_grad_var, global_step=global_step) + + if train_params['save_optimizer']: + print('Saving optimizer parameters to checkpoint! Remember to restore the global_step in the fine-tuning afterwards.') + saver_to_save = tf.train.Saver() + saver_best = tf.train.Saver() + + tensorboard_log_path = os.path.join(truth_dir_path, 'tensorboard_logs') + yolov3_tensorflow_path = os.path.join(paths['local_detection_model'], params['detection_model'], 'yolov3.ckpt') + with tf.Session() as sess: + sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) + saver_to_restore.restore(sess, yolov3_tensorflow_path) + merged = tf.summary.merge_all() + writer = tf.summary.FileWriter(tensorboard_log_path, sess.graph) + + print('\n----------- start to train -----------\n') + + best_mAP = -np.Inf + + for epoch in range(train_params['total_epochs']): + + sess.run(train_init_op) + loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), \ + AverageMeter(), AverageMeter() + + for i in trange(train_batch_num): + _, summary, __y_pred, __y_true, __loss, __global_step, __lr = sess.run( + [train_op, merged, y_pred, y_true, loss, global_step, learning_rate], + feed_dict={is_training: True}) + + writer.add_summary(summary, global_step=__global_step) + + loss_total.update(__loss[0], len(__y_pred[0])) + loss_xy.update(__loss[1], len(__y_pred[0])) + loss_wh.update(__loss[2], len(__y_pred[0])) + loss_conf.update(__loss[3], len(__y_pred[0])) + loss_class.update(__loss[4], len(__y_pred[0])) + + if __global_step % train_params['train_evaluation_step'] == 0 and __global_step > 0: + # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.number_classes, args.nms_topk, args.score_threshold, args.nms_threshold) + recall, precision = evaluate_on_gpu(sess, gpu_nms_op, 
pred_boxes_flag, pred_scores_flag, + __y_pred, __y_true, number_classes, train_params['nms_threshold']) + + info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, " \ + "wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( + epoch, int(__global_step), loss_total.average, loss_xy.average, loss_wh.average, loss_conf.average, loss_class.average) + info += 'Last batch: rec: {:.3f}, prec: {:.3f} | lr: {:.5g}'.format(recall, precision, __lr) + print(info) + logging.info(info) + + writer.add_summary(make_summary('evaluation/train_batch_recall', recall), global_step=__global_step) + writer.add_summary(make_summary('evaluation/train_batch_precision', precision), global_step=__global_step) + + if np.isnan(loss_total.average): + print('****' * 10) + raise ArithmeticError( + 'Gradient exploded! Please train again and you may need modify some parameters.') + + # NOTE: this is just demo. You can set the conditions when to save the weights. + if epoch % train_params['save_epoch == 0'] and epoch > 0: + if loss_total.average <= 2.: + saver_to_save.save(sess, os.path.join(train_params['trained_model_name'], + 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr))) + + # switch to validation dataset for evaluation + if epoch % train_params['val_evaluation_epoch'] == 0 and epoch >= train_params['warm_up_epoch']: + sess.run(val_init_op) + + val_loss_total, val_loss_xy, val_loss_wh, val_loss_conf, val_loss_class = \ + AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter() + + val_preds = [] + + for j in trange(val_img_cnt): + __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], + feed_dict={is_training: False}) + pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) + val_preds.extend(pred_content) + val_loss_total.update(__loss[0]) + val_loss_xy.update(__loss[1]) + val_loss_wh.update(__loss[2]) + val_loss_conf.update(__loss[3]) + 
val_loss_class.update(__loss[4]) + + # calc mAP + rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() + gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) + + info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) + + for ii in range(number_classes): + npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], + use_07_metric=False) + info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) + rec_total.update(rec, npos) + prec_total.update(prec, nd) + ap_total.update(ap, 1) + + mAP = ap_total.average + info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) + info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( + val_loss_total.average, val_loss_xy.average, val_loss_wh.average, val_loss_conf.average, val_loss_class.average) print(info) logging.info(info) - - writer.add_summary(make_summary('evaluation/train_batch_recall', recall), global_step=__global_step) - writer.add_summary(make_summary('evaluation/train_batch_precision', precision), global_step=__global_step) - - if np.isnan(loss_total.average): - print('****' * 10) - raise ArithmeticError( - 'Gradient exploded! Please train again and you may need modify some parameters.') - - # NOTE: this is just demo. You can set the conditions when to save the weights. 
- if epoch % train_params['save_epoch == 0'] and epoch > 0: - if loss_total.average <= 2.: - saver_to_save.save(sess, os.path.join(train_params['trained_model_name'], - 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr))) - - # switch to validation dataset for evaluation - if epoch % train_params['val_evaluation_epoch'] == 0 and epoch >= train_params['warm_up_epoch']: - sess.run(val_init_op) - - val_loss_total, val_loss_xy, val_loss_wh, val_loss_conf, val_loss_class = \ - AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter() - - val_preds = [] - - for j in trange(val_img_cnt): - __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], - feed_dict={is_training: False}) - pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) - val_preds.extend(pred_content) - val_loss_total.update(__loss[0]) - val_loss_xy.update(__loss[1]) - val_loss_wh.update(__loss[2]) - val_loss_conf.update(__loss[3]) - val_loss_class.update(__loss[4]) - - # calc mAP - rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() - gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) - - info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) - - for ii in range(number_classes): - npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], - use_07_metric=False) - info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) - rec_total.update(rec, npos) - prec_total.update(prec, nd) - ap_total.update(ap, 1) - - mAP = ap_total.average - info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) - info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( - val_loss_total.average, val_loss_xy.average, 
val_loss_wh.average, val_loss_conf.average, val_loss_class.average) - print(info) - logging.info(info) - - if mAP > best_mAP: - best_mAP = mAP - saver_best.save(sess, os.path.join(train_params['trained_model_name'], - 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) - - writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) - writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) - writer.add_summary(make_summary('evaluation/val_precision', prec_total.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/total_loss', val_loss_total.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_xy', val_loss_xy.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_class', val_loss_class.average), global_step=epoch) + + if mAP > best_mAP: + best_mAP = mAP + saver_best.save(sess, os.path.join(train_params['trained_model_name'], + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) + + writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) + writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) + writer.add_summary(make_summary('evaluation/val_precision', prec_total.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/total_loss', val_loss_total.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_xy', val_loss_xy.average), global_step=epoch) + 
writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_class', val_loss_class.average), global_step=epoch) + + return diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 02f81fd..76f7eff 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -5,9 +5,6 @@ import numpy as np from enum import Enum -ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../..') -sys.path.append(ospath) - from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 from traffic_analysis.d00_utils.data_retrieval import delete_and_recreate_dir, mp4_to_npy @@ -87,11 +84,11 @@ def load_detrac_data(self): impath = labels.split(' ')[1] folder = impath.split('/')[-1][:9] - file_to_download = paths['s3_detrac_images'] + \ + file_to_download = self.paths['s3_detrac_images'] + \ folder + '/' + \ 'img' + image_num + '.jpg' - download_file_to = paths['temp_raw_images'] + \ + download_file_to = self.paths['temp_raw_images'] + \ folder + '_' + \ image_num + '.jpg' @@ -113,7 +110,7 @@ def parse_detrac_xml_file(self, xml_file): xml_file_name = xml_file.split('/')[-1] xml_path = self.paths['temp_annotation'] + xml_file.split('/')[-1] - class_names_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') + class_names_path = os.path.join(self.paths['local_detection_model'], 'yolov3', 'coco.names') classes = read_class_names(class_names_path) try: @@ -209,10 +206,13 @@ def load_cvat_data(self): def get_cvat_video(self, 
xml_file_name): - video_path = get_s3_video_path_from_xml_name(xml_file_name=xml_file_name, s3_creds=self.creds[paths['s3_creds']], paths=self.paths) + video_path = get_s3_video_path_from_xml_name(xml_file_name=xml_file_name, + s3_creds=self.creds[self.paths['s3_creds']], + paths=self.paths) if(video_path): - download_file_to = paths['temp_raw_video'] + 'test' + '.mp4' - self.data_loader_s3.download_file(path_of_file_to_download=video_path, path_to_download_file_to=download_file_to) + download_file_to = self.paths['temp_raw_video'] + 'test' + '.mp4' + self.data_loader_s3.download_file(path_of_file_to_download=video_path, + path_to_download_file_to=download_file_to) return mp4_to_npy(download_file_to) else: return @@ -265,19 +265,3 @@ def parse_cvat_xml_file(self, xml_file): return results else: return None - - -paths = load_paths() -creds = load_credentials() - -dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) -x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) - -saved_text_files_dir = paths['temp_annotation'] -with open(saved_text_files_dir + 'train.txt', 'w') as f: - for item in y_train: - f.write("%s\n" % item) - -with open(saved_text_files_dir + 'test.txt', 'w') as f: - for item in y_test: - f.write("%s\n" % item) \ No newline at end of file From ed8a8954bdc14bfef4ccc6e87208184f6d2ab781 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 12:09:49 +0100 Subject: [PATCH 30/65] fixed a bug with save_epoch --- .../d04_modelling/transfer_learning/train_tensorflow_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index a4ac642..13c5f8a 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -190,7 +190,7 @@ 
def transfer_learn(paths, params, train_params, train_file, test_file): 'Gradient exploded! Please train again and you may need modify some parameters.') # NOTE: this is just demo. You can set the conditions when to save the weights. - if epoch % train_params['save_epoch == 0'] and epoch > 0: + if epoch % train_params['save_epoch'] == 0 and epoch > 0: if loss_total.average <= 2.: saver_to_save.save(sess, os.path.join(train_params['trained_model_name'], 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr))) From 2866aede589a83373401cd2e8e136eb155f0020e Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 12:39:57 +0100 Subject: [PATCH 31/65] batch size --> num batches --- conf/base/training_parameters.yml | 2 +- .../transfer_learning/tensorflow_training_utils.py | 3 +++ .../d04_modelling/transfer_learning/train_tensorflow_model.py | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index e6c25e4..9b4d362 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,5 +1,5 @@ training: - batch_size : 6 + num_batches : 30 letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. total_epochs : 100 train_evaluation_step : 100 # Evaluate on the training batch after some steps. 
diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index 9b41dfd..b9e8c3b 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -1033,6 +1033,9 @@ def voc_eval(gt_dict, val_preds, classidx, iou_thres=0.5, use_07_metric=False): # compute precision recall fp = np.cumsum(fp) tp = np.cumsum(tp) + print(tp) + print(float(nd)) + print(npos) rec = tp / float(npos) # avoid divide by zero in case the first detection matches a difficult # ground truth diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 13c5f8a..91dc2c2 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -31,7 +31,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file): test_data_path = os.path.join(truth_dir_path, test_file) train_img_cnt = len(open(train_data_path, 'r').readlines()) val_img_cnt = len(open(test_data_path, 'r').readlines()) - train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['batch_size'])) + train_batch_num = int(math.ceil(float(train_img_cnt) / train_params['num_batches'])) lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) @@ -48,7 +48,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file): train_dataset = tf.data.TextLineDataset(train_data_path) train_dataset = train_dataset.shuffle(train_img_cnt) - train_dataset = train_dataset.batch(train_params['batch_size']) + train_dataset = train_dataset.batch(train_params['num_batches']) train_dataset = train_dataset.map( lambda x: tf.py_func(get_batch_data, inp=[x, 
number_classes, [416, 416], anchors, 'train', True, True, True], From 45b7da853d35f651362b9f849fd6876be053e12f Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 13:18:44 +0100 Subject: [PATCH 32/65] added evaluation on gpu instead of wrong fn --- .../tensorflow_training_utils.py | 2 +- .../train_tensorflow_model.py | 20 +++++++++++-------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index b9e8c3b..efdd478 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -1162,7 +1162,7 @@ def get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, image_ids return pred_content -def voc_ap(rec, prec, use_07_metric=False): +def voc_ap(rec, prec, use_07_metric=True): """Compute VOC AP given precision and recall. If use_07_metric is true, uses the VOC 07 11-point method (default:False). 
""" diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 91dc2c2..d9759ed 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -11,7 +11,7 @@ from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ - evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms + evaluate_on_gpu, get_preds_gpu, voc_ap, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names @@ -222,17 +222,21 @@ def transfer_learn(paths, params, train_params, train_file, test_file): info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) for ii in range(number_classes): - npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], - use_07_metric=False) - info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) - rec_total.update(rec, npos) - prec_total.update(prec, nd) - ap_total.update(ap, 1) - + recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, + __y_pred, __y_true, number_classes, train_params['nms_threshold']) + average_precision = voc_ap(recall, precision, use_07_metric=True) + info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, recall, + precision, + average_precision) + rec_total.update(recall) + prec_total.update(precision) + mAP = 
ap_total.average + info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( val_loss_total.average, val_loss_xy.average, val_loss_wh.average, val_loss_conf.average, val_loss_class.average) + print(info) logging.info(info) From b53827b7d00e58cef7f24683226a8f789d0f6cc9 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 15:36:18 +0100 Subject: [PATCH 33/65] reverted changes from last commit --- conf/base/training_parameters.yml | 2 +- .../tensorflow_training_utils.py | 3 -- .../train_tensorflow_model.py | 30 ++++++++----------- 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 9b4d362..7b31b1d 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,5 +1,5 @@ training: - num_batches : 30 + num_batches : 10 letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. total_epochs : 100 train_evaluation_step : 100 # Evaluate on the training batch after some steps. 
diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index efdd478..e093912 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -1033,9 +1033,6 @@ def voc_eval(gt_dict, val_preds, classidx, iou_thres=0.5, use_07_metric=False): # compute precision recall fp = np.cumsum(fp) tp = np.cumsum(tp) - print(tp) - print(float(nd)) - print(npos) rec = tp / float(npos) # avoid divide by zero in case the first detection matches a difficult # ground truth diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index d9759ed..0c0b153 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -11,7 +11,7 @@ from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ - evaluate_on_gpu, get_preds_gpu, voc_ap, parse_gt_rec, gpu_nms + evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names @@ -32,20 +32,20 @@ def transfer_learn(paths, params, train_params, train_file, test_file): train_img_cnt = len(open(train_data_path, 'r').readlines()) val_img_cnt = len(open(test_data_path, 'r').readlines()) train_batch_num = int(math.ceil(float(train_img_cnt) / 
train_params['num_batches'])) - + lr_decay_freq = int(train_batch_num * train_params['lr_decay_epoch']) - + logging_file_path = os.path.join(truth_dir_path, 'progress.log') logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', filename=logging_file_path, filemode='w') - + is_training = tf.placeholder(tf.bool, name="phase_train") handle_flag = tf.placeholder(tf.string, [], name='iterator_handle_flag') pred_boxes_flag = tf.placeholder(tf.float32, [1, None, None]) pred_scores_flag = tf.placeholder(tf.float32, [1, None, None]) - gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], + gpu_nms_op = gpu_nms(pred_boxes_flag, pred_scores_flag, number_classes, train_params['nms_topk'], train_params['score_threshold'], train_params['nms_threshold']) - + train_dataset = tf.data.TextLineDataset(train_data_path) train_dataset = train_dataset.shuffle(train_img_cnt) train_dataset = train_dataset.batch(train_params['num_batches']) @@ -220,23 +220,19 @@ def transfer_learn(paths, params, train_params, train_file, test_file): gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) - + for ii in range(number_classes): - recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, - __y_pred, __y_true, number_classes, train_params['nms_threshold']) - average_precision = voc_ap(recall, precision, use_07_metric=True) - info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, recall, - precision, - average_precision) - rec_total.update(recall) - prec_total.update(precision) + npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], + use_07_metric=False) + info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) + 
rec_total.update(rec, npos) + prec_total.update(prec, nd) + ap_total.update(ap, 1) mAP = ap_total.average - info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( val_loss_total.average, val_loss_xy.average, val_loss_wh.average, val_loss_conf.average, val_loss_class.average) - print(info) logging.info(info) From 0f7a52f25b481f2e66288560af265fb7088f3dd6 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 16:23:05 +0100 Subject: [PATCH 34/65] speed up map calculation --- .../train_tensorflow_model.py | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 0c0b153..96c130c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -17,13 +17,18 @@ from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names -def transfer_learn(paths, params, train_params, train_file, test_file): +def transfer_learn(paths, params, train_params, train_file, test_file, selected_labels): """ trains last three layers of yolov3 network on custom dataset """ truth_dir_path = paths['temp_annotation'] class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS classes = read_class_names(class_name_path) + + selected_label_idxs = [] + for selected_label in selected_labels: + selected_label_idx = classes.index(selected_label) + selected_label_idxs.append(selected_label_idx) anchors = parse_anchors(paths) number_classes = len(classes) @@ -208,7 +213,9 @@ def transfer_learn(paths, params, train_params, train_file, test_file): 
__image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], feed_dict={is_training: False}) pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) + print(pred_content) val_preds.extend(pred_content) + print(val_preds) val_loss_total.update(__loss[0]) val_loss_xy.update(__loss[1]) val_loss_wh.update(__loss[2]) @@ -218,16 +225,20 @@ def transfer_learn(paths, params, train_params, train_file, test_file): # calc mAP rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) + print(gt_dict) info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) - for ii in range(number_classes): - npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, ii, iou_thres=train_params['eval_threshold'], - use_07_metric=False) - info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(ii, rec, prec, ap) - rec_total.update(rec, npos) - prec_total.update(prec, nd) - ap_total.update(ap, 1) + for class_idx in range(number_classes): + if class_idx in selected_label_idxs: + npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, class_idx, + iou_thres=train_params['eval_threshold'], + use_07_metric=True) + info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(class_idx, + rec, prec, ap) + rec_total.update(rec, npos) + prec_total.update(prec, nd) + ap_total.update(ap, 1) mAP = ap_total.average info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) From 3f54aabd4e20e4e72d3c531ee41a09a3cb9995b7 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 16:27:00 +0100 Subject: [PATCH 35/65] fixed transfer_learning unifying code --- src/run_transfer_learning.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/run_transfer_learning.py b/src/run_transfer_learning.py 
index 85af946..b98bc78 100644 --- a/src/run_transfer_learning.py +++ b/src/run_transfer_learning.py @@ -25,4 +25,5 @@ params=params, train_params=train_params, train_file='train.txt', - test_file='test.txt') \ No newline at end of file + test_file='test.txt', + selected_labels=params['selected_labels']) \ No newline at end of file From 615b0a0b22329552ffcbaf3763b0452d07ee0d5d Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 16:50:42 +0100 Subject: [PATCH 36/65] changed selected labels idx selection because dict --- .../transfer_learning/tensorflow_detection_utils.py | 1 + .../d04_modelling/transfer_learning/test.py | 4 +++- .../transfer_learning/train_tensorflow_model.py | 6 +++--- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py index 857214c..b131c97 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py @@ -10,6 +10,7 @@ def read_class_names(class_name_path): with open(class_name_path, 'r') as data: for ID, name in enumerate(data): names[ID] = name.strip('\n') + print(names) return names diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test.py b/src/traffic_analysis/d04_modelling/transfer_learning/test.py index 7f7790a..5d81ac6 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/test.py @@ -1,4 +1,6 @@ from traffic_analysis.d00_utils.get_project_directory import get_project_directory print(get_project_directory()) -print(str(1).zfill(5)) \ No newline at end of file +print(str(1).zfill(5)) + +b = {'a': 5, 'd': 6} \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py 
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 96c130c..7e0703e 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -26,9 +26,9 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ classes = read_class_names(class_name_path) selected_label_idxs = [] - for selected_label in selected_labels: - selected_label_idx = classes.index(selected_label) - selected_label_idxs.append(selected_label_idx) + for idx, label in classes.items(): + if idx in selected_labels: + selected_label_idxs.append(idx) anchors = parse_anchors(paths) number_classes = len(classes) From 53bddb2d345c72ec61000c8c27f25f96abd77222 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 17:07:51 +0100 Subject: [PATCH 37/65] removed print statements --- .../transfer_learning/tensorflow_detection_utils.py | 1 - .../d04_modelling/transfer_learning/train_tensorflow_model.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py index b131c97..857214c 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_detection_utils.py @@ -10,7 +10,6 @@ def read_class_names(class_name_path): with open(class_name_path, 'r') as data: for ID, name in enumerate(data): names[ID] = name.strip('\n') - print(names) return names diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 7e0703e..f83c20d 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ 
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -213,9 +213,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], feed_dict={is_training: False}) pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) - print(pred_content) val_preds.extend(pred_content) - print(val_preds) val_loss_total.update(__loss[0]) val_loss_xy.update(__loss[1]) val_loss_wh.update(__loss[2]) @@ -225,7 +223,6 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ # calc mAP rec_total, prec_total, ap_total = AverageMeter(), AverageMeter(), AverageMeter() gt_dict = parse_gt_rec(test_data_path, [416, 416], letterbox_resize=True) - print(gt_dict) info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) From 692d3fb2641370bef87345ee91233a0440689c39 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 17:23:27 +0100 Subject: [PATCH 38/65] checking the map with print statements --- conf/base/training_parameters.yml | 1 + .../d04_modelling/transfer_learning/train_tensorflow_model.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 7b31b1d..4cdb187 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -11,6 +11,7 @@ training: warm_up_epoch : 3 # set to larger value if gradient explodes num_threads : 10 # Number of threads for image processing used in tf.data pipeline. prefetech_buffer : 5 # Prefetech_buffer used in tf.data pipeline. 
+ trained_model_name : '20190812_yolov3_trained_test' learning: optimizer_name : 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index f83c20d..77af100 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -228,6 +228,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ for class_idx in range(number_classes): if class_idx in selected_label_idxs: + print(class_idx) npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, class_idx, iou_thres=train_params['eval_threshold'], use_07_metric=True) @@ -237,6 +238,8 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ prec_total.update(prec, nd) ap_total.update(ap, 1) + print(ap_total) + print(type(ap_total)) mAP = ap_total.average info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( From 31203d83a55441a50d4a1dcbda2d091fc42104d7 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 17:52:35 +0100 Subject: [PATCH 39/65] hopefully remove nans --- .../transfer_learning/train_tensorflow_model.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 77af100..8908d3f 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -234,12 +234,14 @@ def transfer_learn(paths, params, 
train_params, train_file, test_file, selected_ use_07_metric=True) info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(class_idx, rec, prec, ap) - rec_total.update(rec, npos) - prec_total.update(prec, nd) - ap_total.update(ap, 1) - print(ap_total) - print(type(ap_total)) + if math.isnan(rec) or math.isnan(prec) or math.isnan(ap): + pass + else: + rec_total.update(rec, npos) + prec_total.update(prec, nd) + ap_total.update(ap, 1) + mAP = ap_total.average info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( From 9213449bb009329b9202eac02d2b68c025526ce2 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 17:59:28 +0100 Subject: [PATCH 40/65] print statements added to debug --- .../transfer_learning/train_tensorflow_model.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 8908d3f..fe61749 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -227,6 +227,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) for class_idx in range(number_classes): + print(class_idx) if class_idx in selected_label_idxs: print(class_idx) npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, class_idx, @@ -235,6 +236,10 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(class_idx, rec, prec, ap) + print(prec) + print(rec) + print(ap) + if 
math.isnan(rec) or math.isnan(prec) or math.isnan(ap): pass else: From 8ffeaaa564f2045ab62bf185f142ae717ffa51a6 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 18:19:40 +0100 Subject: [PATCH 41/65] fixed error with selected_labels_idx --- .../d04_modelling/transfer_learning/train_tensorflow_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index fe61749..02599dc 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -27,7 +27,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ selected_label_idxs = [] for idx, label in classes.items(): - if idx in selected_labels: + if label in selected_labels: selected_label_idxs.append(idx) anchors = parse_anchors(paths) number_classes = len(classes) From 7c4b5836fa0a77619d63081e2013338d95cda139 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 12 Aug 2019 18:28:27 +0100 Subject: [PATCH 42/65] seeing if label idxs are actually strings --- .../d04_modelling/transfer_learning/train_tensorflow_model.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 02599dc..0461dab 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -31,6 +31,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ selected_label_idxs.append(idx) anchors = parse_anchors(paths) number_classes = len(classes) + print(selected_label_idxs) train_data_path = os.path.join(truth_dir_path, 
train_file) test_data_path = os.path.join(truth_dir_path, test_file) From 8b5038479f8d174627b739e4bb33944dc4156b4b Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 10:16:23 +0100 Subject: [PATCH 43/65] try to run with just 50 epochs --- conf/base/training_parameters.yml | 2 +- .../transfer_learning/train_tensorflow_model.py | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 4cdb187..a4e08ba 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,7 +1,7 @@ training: num_batches : 10 letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. - total_epochs : 100 + total_epochs : 20 train_evaluation_step : 100 # Evaluate on the training batch after some steps. val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. save_epoch : 10 # Save the model after some epochs. 
diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 0461dab..23876a1 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -31,7 +31,6 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ selected_label_idxs.append(idx) anchors = parse_anchors(paths) number_classes = len(classes) - print(selected_label_idxs) train_data_path = os.path.join(truth_dir_path, train_file) test_data_path = os.path.join(truth_dir_path, test_file) @@ -228,19 +227,13 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ info = '======> Epoch: {}, global_step: {}, lr: {:.6g} <======\n'.format(epoch, __global_step, __lr) for class_idx in range(number_classes): - print(class_idx) if class_idx in selected_label_idxs: - print(class_idx) npos, nd, rec, prec, ap = voc_eval(gt_dict, val_preds, class_idx, iou_thres=train_params['eval_threshold'], use_07_metric=True) info += 'EVAL: Class {}: Recall: {:.4f}, Precision: {:.4f}, AP: {:.4f}\n'.format(class_idx, rec, prec, ap) - print(prec) - print(rec) - print(ap) - if math.isnan(rec) or math.isnan(prec) or math.isnan(ap): pass else: From f4f3286c16a347dbc2c9353a4e14286ffe9e017b Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 11:08:00 +0100 Subject: [PATCH 44/65] puts trained model in correct directory --- .../transfer_learning/train_tensorflow_model.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index 23876a1..bbdc3b3 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ 
b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -21,6 +21,10 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ """ trains last three layers of yolov3 network on custom dataset """ + transfer_learn_model_dir = os.path.join(paths['local_detection_model'], train_params['trained_model_name']) + if not os.path.exists(transfer_learn_model_dir): + os.makedirs(transfer_learn_model_dir) + truth_dir_path = paths['temp_annotation'] class_name_path = os.path.join(paths['local_detection_model'], 'yolov3', 'coco.names') # CHANGE THIS classes = read_class_names(class_name_path) @@ -250,7 +254,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ if mAP > best_mAP: best_mAP = mAP - saver_best.save(sess, os.path.join(train_params['trained_model_name'], + saver_best.save(sess, os.path.join(transfer_learn_model_dir, 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) From b3c3070ddd4ad5895e2ec96006e3f5e9366accb6 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 11:48:44 +0100 Subject: [PATCH 45/65] moved so i can run the code --- src/test_detection.py | 0 .../transfer_learning/test_detection.py | 52 ------------------- 2 files changed, 52 deletions(-) create mode 100644 src/test_detection.py delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py diff --git a/src/test_detection.py b/src/test_detection.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py b/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py deleted file mode 100644 index e56e39c..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py +++ /dev/null @@ -1,52 +0,0 @@ -import time 
-import cv2 -import sys -import os -import random - -ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') -sys.path.append(ospath) - -from traffic_analysis.d00_utils.load_confs import load_parameters, load_paths, load_credentials -from traffic_analysis.d04_modelling.perform_detection_tensorflow import perform_detections_in_single_image as tensorflow_detect -from traffic_analysis.d04_modelling.perform_detection_opencv import detect_objects_in_image as opencv_detect - - -def test_detection(image_path): - paths = load_paths() - params = load_parameters() - s3_credentials = load_credentials() - detection_method = params['detection_model'] - - imcap = cv2.imread(image_path) - - if detection_method == 'yolov3': - start_time = time.time() - bbox, label, confidence = opencv_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials) - end_time = time.time() - - elif detection_method == 'yolov3-tiny': - start_time = time.time() - bbox, label, confidence = opencv_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials) - end_time = time.time() - - elif detection_method == 'yolov3_tf': - start_time = time.time() - bbox, label, confidence = tensorflow_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials, - selected_labels=None) - end_time = time.time() - - for box in bbox: - color = [random.randint(0, 255) for _ in range(3)] - c1 = (box[0], box[1]) - c2 = (box[2]+box[0], box[1]+box[3]) - cv2.rectangle(imcap, c1, c2, color) - - cv2.imwrite(image_path[:-4] + 'out' + image_path[-4:], imcap) - - print(bbox) - print(imcap.shape) - - -test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') -# test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') From 62ffdf553a30be984bace69da8e1150e815b6d93 Mon Sep 17 00:00:00 2001 From: Jack Hensley Date: Tue, 13 Aug 2019 10:48:56 +0000 Subject: [PATCH 46/65] changed so can try trained model 
--- .../d04_modelling/transfer_learning/test_detection.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py b/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py index e56e39c..969cdcb 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/test_detection.py @@ -30,7 +30,7 @@ def test_detection(image_path): bbox, label, confidence = opencv_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials) end_time = time.time() - elif detection_method == 'yolov3_tf': + elif detection_method == '20190812_yolov3_trained_test': start_time = time.time() bbox, label, confidence = tensorflow_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials, selected_labels=None) @@ -44,9 +44,10 @@ def test_detection(image_path): cv2.imwrite(image_path[:-4] + 'out' + image_path[-4:], imcap) + print(label) print(bbox) print(imcap.shape) -test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') -# test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') +# test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') +test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') From 77af1cff207ba2cccbff98d62ca37ca58131f4aa Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 12:13:20 +0100 Subject: [PATCH 47/65] changed path to labels --- src/traffic_analysis/d04_modelling/perform_detection_opencv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/perform_detection_opencv.py b/src/traffic_analysis/d04_modelling/perform_detection_opencv.py index 095942d..50d5c36 100644 --- a/src/traffic_analysis/d04_modelling/perform_detection_opencv.py +++ 
b/src/traffic_analysis/d04_modelling/perform_detection_opencv.py @@ -65,7 +65,7 @@ def populate_labels(model_name: str, paths): """ model_file_path = paths['local_detection_model'] - labels_file_path = os.path.join(model_file_path, model_name, 'coco.names') + labels_file_path = os.path.join(model_file_path, 'yolov3', 'coco.names') f = open(labels_file_path, 'r') labels = [line.strip() for line in f.readlines()] From 97af1ee4872f65b45aee57456f45912cc4657c46 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 15:25:31 +0100 Subject: [PATCH 48/65] break after 10 xmls --- .../d04_modelling/transfer_learning/training_data_loader.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 76f7eff..105fc14 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -77,6 +77,9 @@ def load_detrac_data(self): if (result): y += result + if len(y) == 10: + break + print('Loading detrac images...') x = [] for labels in y: From 2dccde7adf64aa25a8c8f580bff9818c83b0064c Mon Sep 17 00:00:00 2001 From: Jack Hensley Date: Tue, 13 Aug 2019 14:26:05 +0000 Subject: [PATCH 49/65] trying out --- src/test_detection.py | 53 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 src/test_detection.py diff --git a/src/test_detection.py b/src/test_detection.py new file mode 100644 index 0000000..969cdcb --- /dev/null +++ b/src/test_detection.py @@ -0,0 +1,53 @@ +import time +import cv2 +import sys +import os +import random + +ospath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..') +sys.path.append(ospath) + +from traffic_analysis.d00_utils.load_confs import load_parameters, load_paths, load_credentials +from 
traffic_analysis.d04_modelling.perform_detection_tensorflow import perform_detections_in_single_image as tensorflow_detect +from traffic_analysis.d04_modelling.perform_detection_opencv import detect_objects_in_image as opencv_detect + + +def test_detection(image_path): + paths = load_paths() + params = load_parameters() + s3_credentials = load_credentials() + detection_method = params['detection_model'] + + imcap = cv2.imread(image_path) + + if detection_method == 'yolov3': + start_time = time.time() + bbox, label, confidence = opencv_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials) + end_time = time.time() + + elif detection_method == 'yolov3-tiny': + start_time = time.time() + bbox, label, confidence = opencv_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials) + end_time = time.time() + + elif detection_method == '20190812_yolov3_trained_test': + start_time = time.time() + bbox, label, confidence = tensorflow_detect(imcap, paths=paths, params=params, s3_credentials=s3_credentials, + selected_labels=None) + end_time = time.time() + + for box in bbox: + color = [random.randint(0, 255) for _ in range(3)] + c1 = (box[0], box[1]) + c2 = (box[2]+box[0], box[1]+box[3]) + cv2.rectangle(imcap, c1, c2, color) + + cv2.imwrite(image_path[:-4] + 'out' + image_path[-4:], imcap) + + print(label) + print(bbox) + print(imcap.shape) + + +# test_detection('C:/Users/joh3146/Documents/dssg/air_pollution_estimation/data/frame_level/frame001.jpg') +test_detection('/home/jack_hensley/air_pollution_estimation/data/frame_level/frame001.jpg') From 66464c004e360b3ed3190025cd140055010da9ba Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 16:04:14 +0100 Subject: [PATCH 50/65] print statement added to track downloading prog --- .../d04_modelling/transfer_learning/training_data_loader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py 
b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 105fc14..6587b32 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -86,6 +86,7 @@ def load_detrac_data(self): image_num = labels.split(' ')[0].zfill(5) impath = labels.split(' ')[1] folder = impath.split('/')[-1][:9] + print('---- downloading images from %s ----' % folder) file_to_download = self.paths['s3_detrac_images'] + \ folder + '/' + \ From 47cc6fbb66ba70eb46489d8b654467f8907561bb Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 16:17:34 +0100 Subject: [PATCH 51/65] correct break message now after 10 counts --- .../d04_modelling/transfer_learning/training_data_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 6587b32..3c2e6c6 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -77,7 +77,7 @@ def load_detrac_data(self): if (result): y += result - if len(y) == 10: + if xml_file == 11: break print('Loading detrac images...') From d4109b979f93b8049d6a0737a0385168781efa7f Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 16:30:44 +0100 Subject: [PATCH 52/65] ok now break should actually work --- conf/base/training_parameters.yml | 2 +- .../d04_modelling/transfer_learning/training_data_loader.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index a4e08ba..c4dbaa6 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,7 +1,7 @@ training: num_batches : 10 letterbox_resize : True # 
Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. - total_epochs : 20 + total_epochs : 50 train_evaluation_step : 100 # Evaluate on the training batch after some steps. val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. save_epoch : 10 # Save the model after some epochs. diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 3c2e6c6..d35b3c9 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -72,13 +72,15 @@ def load_detrac_data(self): print('Parsing detrac xmls...') y = [] xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_detrac_annotations']) + count = 0 for xml_file in xml_files: result = self.parse_detrac_xml_file(xml_file) if (result): y += result - if xml_file == 11: + if count == 10: break + count += 1 print('Loading detrac images...') x = [] From 255efe86f9c05e1985d58ac408952377ba46c732 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 23:01:05 +0100 Subject: [PATCH 53/65] added cvat annotations to the mix --- conf/base/paths.yml | 1 + src/run_transfer_learning.py | 4 +- .../transfer_learning/training_data_loader.py | 90 +++++++++++++------ 3 files changed, 64 insertions(+), 31 deletions(-) diff --git a/conf/base/paths.yml b/conf/base/paths.yml index b247d87..af2290d 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -10,6 +10,7 @@ s3_paths: s3_cvat_annotations: "ref/annotations/cvat/" s3_detrac_annotations: "ref/annotations/detrac/" s3_detrac_images: "raw/images/detrac/" + s3_cvat_training_annotations: "ref/annotations/cvat_train/" local_paths: temp_video: "data/temp/videos/" diff --git a/src/run_transfer_learning.py b/src/run_transfer_learning.py index 
b98bc78..d25e410 100644 --- a/src/run_transfer_learning.py +++ b/src/run_transfer_learning.py @@ -8,10 +8,10 @@ params = load_parameters() train_params = load_training_parameters() -dl = DataLoader(datasets=[TransferDataset.detrac], creds=creds, paths=paths) +dl = DataLoader(datasets=[TransferDataset.cvat, TransferDataset.detrac], creds=creds, paths=paths) x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) -print('---- parsing xml files and downloading to temp ----') + saved_text_files_dir = paths['temp_annotation'] with open(saved_text_files_dir + 'train.txt', 'w') as f: for item in y_train: diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index d35b3c9..2d77e97 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -4,6 +4,7 @@ from PIL import Image import numpy as np from enum import Enum +import cv2 from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials from traffic_analysis.d00_utils.data_loader_s3 import DataLoaderS3 @@ -185,29 +186,46 @@ def load_cvat_data(self): print('Parsing cvat xmls...') y = [] - xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_cvat_annotations']) + xml_files = self.data_loader_s3.list_objects(prefix=self.paths['s3_cvat_training_annotations']) + vid_names = [] for xml_file in xml_files: - result = self.parse_cvat_xml_file(xml_file) + result, vid_name = self.parse_cvat_xml_file(xml_file) if(result): y += result + vid_names.append(vid_name) + + print(len(y)) + print(vid_names) print('Loading cvat videos...') + # Build a list of the videos needed video_set = set() + image_path_in_y = [] for labels in y: - video_set.add(labels.split(' ')[1]) + video_set.add(labels.split(' ')[1].split('/')[-1][:-10]) + image_path_in_y.append(labels.split(' ')[1]) x = [] 
- for id in video_set: - video = self.get_cvat_video(id) - - if(video is not None): - for labels in y: - if(labels.split(' ')[1] == id): - image_num = labels.split(' ')[0] - x.append(video[int(image_num), :, :, :]) - + video, video_path = self.get_cvat_video(id) + video_name = video_path.split('/')[-1][:-4] + vidcap = cv2.VideoCapture(video_path) + + count = 0 + while vidcap.isOpened(): + success, image = vidcap.read() + if success: + image_num = str(count).zfill(5) + im_path = os.path.join(self.paths['temp_raw_images'], video_name + '_' + image_num + '.jpg') + if im_path in image_path_in_y: + x.append(np.asarray(image, dtype="int32")) + print(np.asarray(image).shape) + cv2.imwrite(im_path, image) + count += 1 + else: + break + vidcap.release() return x, y def get_cvat_video(self, xml_file_name): @@ -216,16 +234,17 @@ def get_cvat_video(self, xml_file_name): s3_creds=self.creds[self.paths['s3_creds']], paths=self.paths) if(video_path): - download_file_to = self.paths['temp_raw_video'] + 'test' + '.mp4' + download_file_to = os.path.join(self.paths['temp_raw_video'], xml_file_name + '.mp4') self.data_loader_s3.download_file(path_of_file_to_download=video_path, path_to_download_file_to=download_file_to) - return mp4_to_npy(download_file_to) + return mp4_to_npy(download_file_to), download_file_to else: return def parse_cvat_xml_file(self, xml_file): path = self.paths['temp_annotation'] + xml_file.split('/')[-1] + print(path) try: self.data_loader_s3.download_file(path_of_file_to_download=xml_file, @@ -234,30 +253,43 @@ def parse_cvat_xml_file(self, xml_file): print("Could not download file " + xml_file) root = ET.parse(path).getroot() - im_path = path.split('/')[-1][:-4] - im_width = 250 - im_height = 250 + vid_name = path.split('/')[-1][:-4] + im_dir = self.paths['temp_raw_images'] - frame_dict = {} + im_width = 352 + im_height = 288 - for track in root.iter('track'): - if track.attrib['label'] == 'vehicle': - for frame in track.iter('box'): - frame_num = 
frame.attrib['frame'] + class_names_path = os.path.join(self.paths['local_detection_model'], 'yolov3', 'coco.names') + classes = read_class_names(class_names_path) - if(frame_num not in frame_dict): - frame_dict[frame_num] = str(frame_num) + ' ' + \ - str(im_path) + ' ' + \ - str(im_width) + ' ' + \ - str(im_height) + frame_dict = {} + for track in root.iter('track'): + for frame in track.iter('box'): + frame_num = frame.attrib['frame'] + + if(frame_num not in frame_dict): + frame_name = str(frame_num).zfill(5) + im_path = os.path.join(im_dir, vid_name + '_' + frame_name + '.jpg') + frame_dict[frame_num] = str(frame_num) + ' ' + \ + str(im_path) + ' ' + \ + str(im_width) + ' ' + \ + str(im_height) + if track.attrib['label'] == 'vehicle': vehicle_type = frame.findall('attribute')[2].text + if vehicle_type == 'van': + vehicle_type_idx = 2 # say vans are cars because we don't distinguish + else: + for tick in range(len(classes)): + if classes[tick] == vehicle_type: + vehicle_type_idx = tick + x_min = float(frame.attrib['xtl']) y_min = float(frame.attrib['ytl']) x_max = float(frame.attrib['xbr']) y_max = float(frame.attrib['ybr']) - frame_dict[frame_num] += ' ' + str(vehicle_type) + \ + frame_dict[frame_num] += ' ' + str(vehicle_type_idx) + \ ' ' + str(x_min) + \ ' ' + str(y_min) + \ ' ' + str(x_max) + \ @@ -268,6 +300,6 @@ def parse_cvat_xml_file(self, xml_file): results.append(frame_dict[key]) if len(results) > 1: - return results + return results, vid_name else: return None From f9ba589fa2b9d50b3b685786ad5d6e508acf8629 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 13 Aug 2019 23:09:48 +0100 Subject: [PATCH 54/65] got rid of print statements --- .../d04_modelling/transfer_learning/training_data_loader.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 2d77e97..7ab4dbb 100644 --- 
a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -89,7 +89,6 @@ def load_detrac_data(self): image_num = labels.split(' ')[0].zfill(5) impath = labels.split(' ')[1] folder = impath.split('/')[-1][:9] - print('---- downloading images from %s ----' % folder) file_to_download = self.paths['s3_detrac_images'] + \ folder + '/' + \ @@ -194,8 +193,6 @@ def load_cvat_data(self): y += result vid_names.append(vid_name) - print(len(y)) - print(vid_names) print('Loading cvat videos...') @@ -220,7 +217,6 @@ def load_cvat_data(self): im_path = os.path.join(self.paths['temp_raw_images'], video_name + '_' + image_num + '.jpg') if im_path in image_path_in_y: x.append(np.asarray(image, dtype="int32")) - print(np.asarray(image).shape) cv2.imwrite(im_path, image) count += 1 else: @@ -244,7 +240,6 @@ def get_cvat_video(self, xml_file_name): def parse_cvat_xml_file(self, xml_file): path = self.paths['temp_annotation'] + xml_file.split('/')[-1] - print(path) try: self.data_loader_s3.download_file(path_of_file_to_download=xml_file, From 2d89d5b51f76fdb1bc6b437ff62a06067edf5a8b Mon Sep 17 00:00:00 2001 From: Jack Hensley Date: Wed, 14 Aug 2019 07:40:46 +0000 Subject: [PATCH 55/65] see how transfer learning performed --- conf/base/parameters.yml | 2 +- conf/base/training_parameters.yml | 6 +++--- data/frame_level/frame001out.jpg | Bin 0 -> 51456 bytes src/test_detection.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) create mode 100644 data/frame_level/frame001out.jpg diff --git a/conf/base/parameters.yml b/conf/base/parameters.yml index b674aa5..fdf3096 100644 --- a/conf/base/parameters.yml +++ b/conf/base/parameters.yml @@ -37,7 +37,7 @@ data_renaming: modelling: # obj detection - detection_model: "yolov3_tf" + detection_model: "yolov3_traffic" detection_implementation: "cvlib" detection_iou_threshold: 0.05 detection_confidence_threshold: 0.2 diff --git 
a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index c4dbaa6..7b4c2b1 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,7 +1,7 @@ training: num_batches : 10 letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. - total_epochs : 50 + total_epochs : 1000 train_evaluation_step : 100 # Evaluate on the training batch after some steps. val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. save_epoch : 10 # Save the model after some epochs. @@ -11,7 +11,7 @@ training: warm_up_epoch : 3 # set to larger value if gradient explodes num_threads : 10 # Number of threads for image processing used in tf.data pipeline. prefetech_buffer : 5 # Prefetech_buffer used in tf.data pipeline. - trained_model_name : '20190812_yolov3_trained_test' + trained_model_name : 'yolov3_traffic' learning: optimizer_name : 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] @@ -31,4 +31,4 @@ validation: nms_topk : 150 # keep at most nms_topk outputs after nms # mAP eval - eval_threshold : 0.5 # the iou threshold applied in mAP evaluation \ No newline at end of file + eval_threshold : 0.5 # the iou threshold applied in mAP evaluation diff --git a/data/frame_level/frame001out.jpg b/data/frame_level/frame001out.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ccaed0d107b8ae101d235504e3b5588d98536400 GIT binary patch literal 51456 zcmbTdXH*kk*gYCVK%@yMRfr0LN|O#!Vgp13taOP8(u+t75Qy|n1Oya>h;$Jmy#*qK zq9VP6lq3`>frJtQDg5($-?i@jazEUACo8jN*32a5nRDhmd+%q@@zn7m;LLqPV?zK7 z3jn~v`~Z$;0e1i=SpHl7Tc7xEIm!Cpn(gFC)|2dP?Ck&Z<={NU&cVsS&VGvP6zA#x z7UmW=*Jd!En&&tYdG#j(U%(KIo#{nn#*#s^s-(nYh zYhQpX9I|1r&JSLUPq-LJK)ut1&Reh9pz9hpfkY(MJQ8&WcMLd18N55WO#C!F z4as;=OWZY34eM25uckjJcp;hYdDGL#bZg>pbnCn0|j#JkYMXo`=S36H?;gH)Vo z5h|}hMfU+ONpfyVWN$ldqXiucat3lhUl1qbb*~b+u%Z9Z{7~7_4QWUm=K5b7Hxp%N 
zmlQ)Sabee=)M13<0HCIyIXyOzHEDSE}_Jt-*^!`}%e2r*cfGPNwRl!XzeJz-m#;x=7vh?{y z$B6>U6QbiH8A;WGJ|XEHboy}pT^F+?Umf+XLg{!(%^qe8`b>4Jj(0U2f}$4U*YpZdPTyJcRdwnoQia1Y;N&Ld7Sb(8 zB05}?I6}$j3KxZ2k3h9GqaJfgrxd7UDpp}%14N}h?GfywaD!ru(LwB>1SEEs!MTR7 z{??E6K=W*b(CmfAe*co4J|oXCv28ew{QXozJk{_TA5S;c-U{kW3BcLHr=H-fnuu@= zskrYYR2{}hziSN0fYj6HgSKX4*OCvajGBO=$PORCEZFoiI!ets5-$l=btN{?USAI+ zC1;GZ0m)Q)9)k@kLn+1i@K8KDHFC^izD(H$3lXimd>c;f{0V&ZF=2^nv64+y3m?Bx z8Jh-0*YtK3i;<222@mzz(DbcGMXYpl;5B8MoH@Cbh$gU5Z`pYugf7vub;BI^;`cso zc~;_x-JAa|)a@+hgVjI7-xTFrIX1%80(viq!0Kc-`=}Za6Y`gAY)WG^R2v(1ny=bc zu~6n+)jRLn*gMh@54W9zJU=B)(^HQDj^OA8QCgVWSITZjipOt+ zy|eI-=P0bB^DW1Kh+_be#1|zr6BH{*)}lm}L0rsJ@AbSz@Ood^JzL1851Y0z?Yu25 zZ!AAOXfN@h3eH2%J_fj;Bij`wQ9-ha3}IJI1QAYQeTe=ZYpS9oZzt1V^+QQ(75D_~ z0P8@bPcpb58w{aI+GQXcN+OJGz6rcwPjR0ID+Q5L39Z@51kql*)5L{1Fc-+nhs}eb zPO@mEiqK5Qm1tgHz*F}9%yAb$vwx8NK%%sFdNy!^JJ2_8&QNYsiE?4PLn#@^4#T=1 z13ITD<~ScNm>Y~a9$lFG1_gH3VHJIhp@rqcpRMHW2Y9Xd=5@JeqIziqt0xci`#?2_ z%^_Y8HXq8$oRtGgj{$;UMOcTUJ_nSOD7<;Oay|S!g>x}pCbw(mtg5*O*2HQlm5-nk z^gfoe30meCsnRU5v85^xS?-3W90S;jS|*C=@Jf{4=b40oO1vCb=v1F0UBWTnQV+D2 zlRD?#_KA9)wzu7)$AxR$Y^LhlGWzqcjP8RXCE0RZX@cRenyQI0^)0RSMbRya6UwWj zGVKrP`A|Wk4?b?QLJ1BHpn@no0Uij^e@6zRYmBoEiC1IllrwYtuHCM6isbmbG&u=8 z=ihI6^B556Pl?0K&|d1@?^|peZ`%t7%9j_ld_M-XM_FQEH5^@FK?(^cH$mBIX-;2UD@e zIA7-r+-JHy;62WtO$C`Euz`V^)v zJfWB_ni{TB>P?imY&sR=oL$E?DvsW1RW5q&JWQ6@64QMEk%jPS}RapDv5JrX!l4J6sP zxv^-XV;V9rfE!k%DY!RN?-!)7DAWF!l=X0^&#o7|BF&8%r<)->BKGo~LBCP6csK?U zFz17v?C8@A^OW~5H`vNJtFU~pQA6JiSfMeen=a z0U3`z9{S6t*mFwg=#C0$SG7-zXY}hQT9f`O`RAuhe|yitA77n1DEoBg^q%@omm(-x2uV1K} zx6QqC-?K(bOR8adg>h6DOnPtJGoVAh~8En*{Q9%NSDVtR1z1W5vL{h=T8wlpC_4 zcFxO>s#`TFD1Iz6xRGk6`u&r?UYOjXc{KCZ)hW$<={W}AT?odco`nZgcdjs%0Un!) 
zs}0Gq7fjn6?#jB}{$?)k`>wy7gjv*#hB+^s{tNY@=YU)($gR2xz3dkMoZ@ejhLY_2 zJAnxqzAVFIDW$rE3)K3Zulm!E0`yiJa=>5E2*3W|HUmC45qll`)nSm3F1Qc@>}!gb2ZF)o~qyo+Dniu-c& zvLMB*ZoFS_2~cz0idFxelVhVgy^j=Jvj;XcARZwcs~u0{w;e@$SL81J^U#+{^V#1R zuN_^1-n+e=#Mkl_(i2RpSoPw=%Xaa|^`dzl$E!B?bp%6~=8r$DE{w#9zyN~h-6Y(Q{N=+zQyZQEM0q{!DMt(%XM{XR+h^~lXs zD%SNYc3&L+tdf>DrNt1ELRKkLCOh*b>4i`q9C}9FykS^T?a;KOdU4i;I0^~0wWa3T z_Bp8cGoM1uv}LszVvyJKuXaoPTP=LO@T8-yG%uM9$@jAF*EB{9XL$aCry7GSJ$&@u z_sRe0lfFM-CI)K#`EcFbl=~}6vIZO+)$^!*b$H_5WW%LJ)?YmyoiyXhjgawlSFvvj zJTdP_XNy1QzA;OZbC?AL{K7XP)hD5xB=fEaChZV~-08KKuWrAg(f(LLhpK?hlCy{u zO1;_R%pxv-v*TpNyIyHt^De|j(J`QE=NRyBqRR8WS>UF7DdegIJQ3!`H@%C^Z(&F# zb>_qtsE>W`Q)5UFHyel4yE{4V(Z&uRBsaY%td^`RB^Oa_PuCsAb-=}^JEQz3HN#n& ztgyExjP&07W^Bn}=QNo>sV)dJIF&=~wO5Y2XQHJq_s{fv+PcPube6Bx?49~0i&tK? zm34`RuZCwgjA%&RYH<5XpsOu0f-icx4sC<)<8Y? zhs1f(-uZJ(#O<4&(ZB~!Xny68Ld^Wt5!xM`61VG2R8K!bB(HQacdkW{n z(vx@ppaOoZnLUB48qar-;rB@JPFP&NOyet}cVVghx^_&q(>rwEs=-?CEwFO<47hr` zdt1oZ7Z2uyuv6{EUxd|&16mgVXAI5N|57UJnHXw8?4r-fwHx7iRi)81b#Jso4Huwk zTz&RKF@1QDR?|35AtOJ>mXi*FN(F9rc{{<3HpqukC67M=@_I*~(4BqT=V^iTRB)%H z#&CeCV7FLYw4z$d{aM=@UqSI2%>YFPzZZv!(d4R_qM}idKn4jLPF5l3uE&AZ4%vntF+C!57-D{u` z_>ob&!9DW3L1+M99k)147WgD5$ScG7wLbeuY&{kpbJQ_nP&%~-sS59FWa#y+F~Hdr zPT)=zUl)^u)%7baBI2UOZe9X$U`m>}jsc*Y*u2%$_VOIEmy1OlM#;g0Bi4H>4W3nN zJ?mt9snk-h3D=B{my{U^ND&x(;1}Qy29ILID{ixZJQ7Gp7Ruk~H7U^q$0q*nix}o# z5HBWHx}B`v2-5WOd!IgYE>)mOdGV()==nmib*&E0+OOj)Y5h!p`SaO}eKyg%9Mm89 zL(6i(baTUCBLb#uehSK!y_sD*n@~?syjs+lR|r!;I{f4=O+0jbNNFv%g3OQp5YtRm z?;Z1&XWO`Yw3PUm=TC90Un}=F<=*>8CXCaZRrEM=U`~fu&@sS`68E5f-l_M!!I+!{^uRB)T-NK%m2qTQLZpC~bWh=wvnj^_WYa#~k^Ghv*X0vs zGuTN7Kc_lO)M*o7^_y0rm0bz7{cF^LdYll93xP#%g3D-X@TXMpw7$-BJXbRQ{@{Mq z8*Gl+h}7@^pjK!}a-pT|7yz7Tsl|)z!KUxr93PxvOox%gZJKNG1HV##c2j?osv(|w zGL;4U!4?|z);nWCx>8tLX$OK+S6jdh=;=U2KO3z+2I%kp)#Zm%@#=VB{VFCt0M3(9 z_zr~gkQltw=J$+}{pwZew4M9oYxmSH$;5qr@haIRbsP(>cdqusP1Pk;o-lm&_|98B zbMuiVy>wfk>DiB)?dRy3?cyE?7L<;ouU_WLSEcuZT=qsQ5oBM*6$yQMP`(Jo1HTXz 
zE=@IxsIN4CFilNR>+5!jPo;?B(f3J8?2JDy#>2Ww_~+t1n$2x(K+g+~3~NIMdT*pH zv;%x5{5r(NvbAP{_9QsC5tSFx2cBD3dxVDav@R%Fsmh1>GL`~}JxQd~ULHL;CJM@#p3B}_(ViEyq#J4zFx z@>mQ`x_*l6NR$q1D`AYcUl~%^>(}gA0HA_`bY@;2SP!eQ_&L2Y zyz-~=d`$ey;s~xt5sQOP#0=~nCAZT3h{=cEGUsm|qoV1s%3_j2lC9eJ;N;xS?}SF# zlX$6#cFsw(2P_$OqKY6-Xjkn1HdwilVQXc6)~{?-O8iSM-N@>RU2cTind zyGjO*P>jY9KphC@oKD<`f$XEE)}+2>ZPDrFo?rP0l!TlP7D;&aZ375YS2_bVA$n(G z2iZy^7nu@F!gfHn-~*RTNfWmVtjf#Wp3TRA{D9rmT2kMSkoG8O&SdoGwzjt6)I9Qn zGTK*j`X&4raK7vd{qv>9S}^-`-%G`8gQy3bfpcdKYw$;;S?_uuELU-Aax88&_!_OGbW@PUH` zVW_rjS0&J5HW&-pRunX$D$8tLXGya#U6Awo#Z|X;jYIV3GJ}g|5!H)0Pg5_hOEd6z z~VDbA!W60ecmk(4EFM*4@XIlb(abZp<&$ofd2zt7{C}ZFl zz-`!8PIGM<+t>aBel_%^M)}X7_L~Ki*Vl?HS&8cTR&zLrXl@>(uiM&6ii<$q3C1ei zCqD~q`gX}$;nS+Tg?xpft<+Z2qf`#{X^0Mkvy?dcBdxm%veBKbub|O~_`cvq>UB|~ z5|^WR5mzspX|uNvv^(DFw|Hh_zB!95$BzE!A0odg4Pws41-ppN3aShf?`xV+?|S4^ z$&iF%cs`Cu_N`1y_2A4^LAwJ?F>?0)zEUz1uALc%2H@tnY)3#?5n_L8O3AF-X;0Mj z_O|zl-t)?bK8R0ap01BgP$jD*KV2DFN>pp-r@<)JczC?N;4q5w)z|rc zg^4ny&nD+JJ`Cl)LJVGRe<%j2PL4c(=AL#XoQw9fCXQ;h+AV~hp3vtVdsMGeIlId` zl^gqBk>lpl3&FhRp935~sv1p|=!0pyp#wfe@#-cgIh5NemX^7z*ctm?ULuq<4c1-M6=fovy1bSCh$m~k8;5kJ5 zp=;DDw5t>zEIhvE?2T~()As#JV;#@O?Y^P5mSE z8`)dJ1UxY$R1`UBGD)T<%JpmBFaZ*5tragd3fjKNTbIs}U-e+Qq;dVnCk35e8&GQH zmO9U+dGf-qF>i$`FLiN;vqKNy*@n-YpTtUgTU@|jI5PI4szToUmThRDI65pbHf`=G z{M~9$F8*UF$J3;OJB6Bhlz72z(f>4Hogqro8d9?=nnZ3oxdreTdfi?ZY6>#Qvh$u1 zN$2>j)$eGLb*^>9XQjWqr2=oEf0k$%hea~Ue}|5)B~h5FX-$b$=Syg0>J$r{)9a6o z0?Mb?CY3*sHzlFBQl;4%PduwlW%N; zNpsFWqoSDIj@*@Jzjk^;d&WDAsfy4GKu&ewDOXT~Y>d7rrH~;sQAscU>dqo*tTyYb zkPvyr-YWUj>cJB0+kvOA4R0W(m9Q>9KvSb+TMClsVNP#U115y@!b^iUrEQlzij};x zHr&gk-?bzQoRZp>)ann`Qn+3&KApct3OthFBFB^3`g?NKNA3`l3`f(i)MEO}ZzMC~ z7DTEV@%~v^`no1^b1EJgnvwFGg^7QWA6vpK*hIK7L2{p58r?CJC@*ZKBe|TE6@`O| zueF>N788ywu4@^qdUUd9WWUNrof{(^(-Z2VKVD1XrT$=CqZjTGykQA9&SgeRo*(u8 z=AHXE0oRf~`6P2WMjWyvXLJk*SY*TxF+ody&{~$9%}uaXhf5TpKXNi zn=0L{ZKh!S7B?XNTDQe@(!9=pt=c&;TcgRHTrF~L1gb$JnDBak>>4n}LiPE>i zUHYOE0Y3Z`Z%j`%YMyxjdw&IJ1uJE;HCMQF)tt7@4V;uK2KI$7IhXIR%fnWLqr~e^o{K&OZ-` 
z3%)U#!A?6#3L>2QLQ_PIs*3};HYBBo{pAqxIaO`EjV>jBd)c~#`RA*=mI5CtlB#$YKcwF zpH2ExA%hVxS1#DJJGCR*=6nb&#ed;ZY0;k8JW2w-D2q%sS(Ml8(}ub>BiBWA&$~{-ua2Xis;Q zQFi_NRqoKQ@@npHery*!Q`>^O6V1Ee@kcnj*>(O^qyv=BT9#AZ?}=(}O8Ya47tcle!0XZTqNY6HX{F0EgO zn%*F1H9QD8#6c*4Mj z=+c$)tgesRy3J?X>%%kpH_0{Ed(APA@L`q1+d^88w4J!hxB^KBpAR1l2T;N^WA}AD zY!T-uk-OA8o7lxBVT+nSUsT_L`vB~^Z~j^iu9z4|ZK*`V6(|u{50fi%L-JV~4T0~X zhVJVQ4oC-`#peqI(Se>7i;L8~<2+$6G5Uh4 z`maXqC}t_hoD5f@@0~L-re`t}vjQ0R`~!vX3Rmb-VSURByHLI6w;Lo=C=FW^F`r{B z8cd+do;8!t7Eyu9P}yTZ%)#e|x9Ynv2M1X4!ormCHuyFLXh#}Mp_zs*n)*?-tftY^ z=CgJCeUeRD=k_(3*n2S#eA=If@9r>PBF(lpX)i^=lf9JzUor{ie-Eob+77`MKZ0tJ zAIm%vXQt3-bma1O$}snXu(tKTTh>I&m=2(zR|IJHOf+eoEzB6t6M)9rwwJx~c*w1B z4DhJap z$Ns@f6;|^+bomFx>$El)HF`~7j{()G^?0StV}LjX&ES8z+7&Km+?OW11q(tSzz*hu z`3Nq!ZJ`Y?Kh(X&EHk)}6~9V#&InE@X=tS%18AXW8ne)BeK`4*@mq;0mBxWj$&A$3 zp?HESO<9^+b}upyo`w1{R(|i!12Y_JGQC&xewEN(k_O@7eRzI1H@3XZ$!aIXg&LI4 zYjf&W=D(x-w69mHy}(wjwKcV^xtm}oP&0is8+{BgnB-er8i!%8-P8pKk^4*yO+t10 z7j_2(sYvR9rN4ycQzAThKNw|IW<_5D?+s==vd|qH3l&=a({Fm` z2a|z$2izrT6e0+MdOtN!z8*v|D+>Eh9uXHJ_LYQ9zk*Yq5I{3Sf4mKTZL%WhLYb8T z-_|trWm`#d#*9R7Gk^4<%d%sT!ZV-!E1UY4+!)%jfw|1672*|;B8 z+ZJk|&^G8K7B1`oBz|;HDy&~#WDEswZth%qk7a7HmK!_9Hi44zjTzlyKun*P)Pb1# zJkmvB9@41#H#gc%k5m6Wac%As+;3rtj?|Y$X}nvX(fH74*-~FzUnaDB{;>8E48~Cz z=0bA8wJpSK^2U@Kd$^S?h{R;(4fjkL)0h5+(GfA>0rqP=q__@-_^{!3s7}bdO7*Ab z)=Z6C(qQ1{F<`s-WN3T+9*i(fOzNIjqgUy((Vm+p0G;&DH)InmB8S3{0V~P&CbA$x zv28}cSw;UR&*571f@6R?7)TSLSE(Um=jDo!Bt%(`fJlh14|4iw$4#6+);{1v1+ADE>T z(yrYII&y~W%BmPk2Ok5n5Zaa7afq6rs7Zt2y-FKSrI_*L?=1eR8}fM@x~4u3ysyd; z3d_|a6ifU;Po?-1iY+nTbzTxI05u-2=h|+l{PgA0g+yzoM9{ork^P)pA|-+9z!(}1 z|08BZxLEK{?A&l0X>5jL+WT8oZ`yLh_95XIV5%qg4<3VNnzafjQHyi5b{jr!Yf4Vo zg=pYEYFG6>kRGy&hzFli%c7^Flo$Wur8gw?nwqg;gKfbcKpFB4A+Z%p3@8E2Pm^HQ z>T5x{Db}2wo1sKM^7&^}p-YVChQzi$@j*In0v3 zN(=-Ke%OO2AZqAw1X0`vePzlWyitTj=T7M1#T0pOXX?Qu_tTn-EZI6%ez7M5ODK+H z^EwI$rqsp6%CCq?BwZU(%T=YYYgk z7RGV!`)Flk(Zk*f>jaU;`9PSuYAZEL!u0hw)#qjzZIt~*rpW_(lU}6jSV}3hYOM2; 
zeK5Le4!W1~@6dTc@m9P{Sw`X^|x1C1Bfmh}qRD_3tO0CpQH?J`@O+w|=dYNiK&v ze0~1!2nkoe+J31Y+jqE!=9w|6u=#h@A1R$BY=-|S|221dD*Y5;GTE=iym|cm-OzI| zXE3L=I4q(4Br%1Y5GJwQ-;kQ|51Z|Tm=5CZyc=L)@qEz>gix3ZBcdO$0x+=ugN`TU) zQbNnQ@3l?Y?pnEqBZOR84}a^aJpw$0nZ=2Z=kDM@u?i?_thj`joJaH_YWB03=8xZJ zj2`7Im0G6F^ahIafdzx^&q&gX!hbV8TIQWhaYB}a#N_eWRul8{vu6B5vho|byy6?2 zLDvPes#HaX$WsgOtLOUCUE+3lNh>^J11#@>CO!m9P=YXxA#1fm zRb-{pwfs>OOEgr4($B0UTlUInMkzkVV}k6tM%8n=W#FT{0jQx^ zn^N)+k!gS;iem%%ySFjm7#pDo-#2BlO~ZR-UU{7ji<~Jex~s_Y{ZdLF*^6?HB-q7+ zg9D*5L~?D}jkgCOsVOIGu1l~u-@bR=QKs3Phjy3LcF?&HYqNAT?}K`T({bI`roGeb zkPdoM_aaP+3(-51-6g3;idlvyyF_)>+I$1^@8t<~&GI09xbpP1U(fZ4{@ufsJU0CV z?Uw*`g}FQ(RSy+^&e{ocPY`CvOg_*jByr!XK5Ev{+lVIdehokfL6q)I)Ol21&`0}` z?T!J1;*iTb%w*jzoi@v8p0p$hJ|X2-ltcPDj?4-bzRp5S%x;9uRj^!}`)eE~-h@w; zN<>+bF%&dzAueB;I1BUQl5-$Eo-`lP_`cizOFxm}l&8OnPzsslyPum^EO$QB;ZGPq zSDQuaxr0gwG&8q&O{^-T%;QE?Dy6w~3_Y^zvGQ^GZmO~@Xm0>Cg5tYRud4sW_63xZ z^jy5@R*i=lx(zoMe=@yEkfbWke(xifX*r%bl zD&2#`au-khX&8!m^!EDatqUQolKVT27#}Xz7U5LJg}F70cJM69b<}j=%}VE>Ogocc zL>A83&#$3*#b)htCcO*F%#HysTB(PL=P=N=ntLD{$}F~n^!^Bu{0!;<(SQsrt9A@1 z5GFwsLRiVON*%-}m%j^EF5hE*zXklN@&ga;=PsPJcF#I(G%YcAZv(x#+XdPSaQFp+ zvn6oin0}jW4>1p<6(?N_!B9y<1MG6e^i4F*k=jiVV5f6wB`l{jkg7)^vm7M7>#Fr(j@Q_5=XIP)4qehy7g6xln#)WzMyOyIT=ITE$ zuyy4;pIgT9A+(I8kQVf`XG{bOPL`CPM1``9EMS-O(@JMkWVWncjG=xhO2hQ^HbKkp z!}x3tC&_*z^~iWM(=n;=kHLpxAMDDm-8j)70{eXbLI#6u&V>twBC&YpoY_=kQTxXBn$R40Meojl{%} zNIi`qE($c~{UmP3Da#^SIO*O9;~Gt)MX25F`!Mu5gtXnW)VOmtd2xU47%-u}j3%0x zEYvW1^CjCLE9RPAHshU_yKK)(&A0zWYW?#lq6}B@@zeb4ZaC+uVz59CkE|fBHX+;D zIm^D#P!Io?y3>5`Z#{e?`nEgBx|QuCv6PVIK>xg@D~GF4_0Zdntz%YRLhU^_FJ}>| z1P>593w*x2TGaaW;eQBzjwssIA`g$?pwM8?1A+^=LlePBV8G=eAk_Rmo*_$lg-3GH z%t-2yvy|KO{c21h-n-9;bk^2CLcPBWvA&Cjf_yW)m#fg7z*FH@AddL0xJA$mYp3KD zrlcf(Ys@T9FYD{Y&<{hwzPC8%{=`U&8?^}GbsEwnGE@etd(ErF2(@i}bHDN;;Mcy{-5==#a&h@*4gs`Lf8sa7& zp?zFjZRt1vt_IYlehztagIwtoHGj38lB8bQn@rO#>3~%{$eNcFA~b}eRh`!!0e*2d z`z7PyHp|E-CKKTo-9r39e1sL$MAb@Tqo&x~TYhkTy&vzjFx{7q0YBKx$47FKUQrE0 
zVNv1YJW_YtDN$HTKvLV|3r;t^zw9JrO=!MRwGSoEbmF@;ZTbhYi=NN_JxmkdK`o~3 z0)N*bVV-(&SiyOS@?LA`8bKZ75>{yX;eoRBOTFJqj$F$`5Aj79(^(QRFGzvo^!RKj z8Lpo7Va}qP^`V;iA}GY6+konG2vlBT#94BDR|2?+br!@6WBMft`+GBhICT)piq}Z_NU|SR0abi+F%(ywaRL!o;J>)r8#s-ebVc zbNCgltHWR4OAOvmy)?%sI8>)}f>KI`6UjC0$~jC(%jnJXK1u#6X|(&&YODrdqrApY zC&Qv+Wr$ziacnE8!jBd`cIB|;lY2mcTZ$iHVt;McCXnPZhn=~Tt;VyzFx*7g{UgwA z3IfNYI560C)Yt_+=hm=>ovvpg=joa*%^I~|?Unshfh^JFt?!oCNgq$O%6F+IGo$w} zybO=}DV?jtwjnILw$hjLGuuEAs!y@~62qi6f}Ccjw%eNqWAs%>@L7~#;Xj6IU29D% zh4rgDAnZf%B(M$Cb{;lro>Nl?OU~CF^+!+VA3PrR+;$EGxYj5O06VS||AvLC>g&fl|2Tl*40_t*nI3lo3Va0`1l^*-VP zM}A2tGl`P3KdS+gGNx za{Wwdc93e9*$rWo6`CAEH={HJ(iBKiY^&7M+X(8*@k2)<)7Nd5-TmEW`fqT)cdNJf za*w9F#$y=aHs~}VvY}MzDww=Taa}kETSP3>#>GCt@x?-KNR!x`gUEw|?n*xAbF&r+ zrq%5$`p&O?XD8vzL>d`FQ}Kxf5+BGAuajcakiwMl;-BgY>@Ha&c9f|5c1BW6Vo<6Z zYWXgF4#H#3Mci=kDT1 zU3Dfn?I+5{2$|&8GErp6+>-xy$g~u^`~LHy6yncdk-zVLiD)~K}2-)1nTzRxG6{rcp-f{LqW05U!w=%wN&#iA(H zI9g~C;JLe-#SxLxw%dE7z;x_QqBngSj)LYgiKGPSRaG8UB79^fbK1!m?OLfZb$wOR z;$xBDC>=i0E=*^#K2Jmph{Ix+Sw9v)d4(2F%X}H71e%zN`b>n@``sp^~VKr_T?wQinV5ouTD8oz}JoViS z4YpS)k<3jt9|KMob|LDF2$sYr7_^{b32`d|YaZXO`ds|cD0WytZo5|PYHoB_sFbLL zmX!KFc(Z=7ka%sCX&5>Y?G?u4Y(bG~)$|ra{m~WUGSj6)DBgK9T%FkX7VX_T#9)P( zEP^_fV*10%ASTakD_J}}OFr2JYm{icq|Y$O=GU{K`Qv+&7$D^e0;BiX1TI9WT`Z|^ z#32O#y|bYpjB|Fh9&6UC+`av+NCm#&$STp2V_ft}CnFJQ;W0n94`f>xZi(xJPybQ? 
zyW6>Ir2h^~4Rk#y1y(-FVya|K#O(w}k18j6q5LD>;hmnA*iWBB_o|+Bg6i`>x1TdV zK~tg_IZB7082+}UU_kynqTX`-I>+yrlLVK4_m=oVI>(tf44x!B5%kFR7xt950`g_6 zm-4e5W2AUrma626P7!xAz_DPkVfgpPGm1GPU%~Duhs?!e-Yt1GQhaDqU~R^#Db889 zdgbpq?LqC;HFyYZkJ-UQBNF%llZJV0M~BeJnls2mP_6Wi2%lZuLzxATCxvhA%h#;&{s_5WD;2C2@-Q0ZomH5 zSRajTyREZQ-~6t+p^3jj>UH5e(FpeHax<&ortO!7;JnR%8JQTWk87_V8{552G(O)W1S`9c zNQQAkSq&je&!6Ao<_+{;L2D0E`h+tkUKgJvv>hD-($)+g`Bv{DJGZBH$rF@JJRjvg zDinJ~cXEi93uwv7v`u*R&v@eiW`jrZf0d1d8sW{kDEM9*p}a6hlK9j5G5?LZ`{zCd zs|345^p>uHOMu*a3NFSZc6go@>gC}br83rYhY@(-bSZAN8=klz(5~PLWHE}DS7Ka* zAfAb(#Ef8aVFZsR7{r9HnQRQZMuwQccey)Tif_2tGG*FW-A|T04u01@4cL`3XE;vH zXw)Yo{2W{JPalHS5B2^*;zC!x;>31rtU~>jfLr{bq1sXDQZJ@C-iC;gmS&LcoKFwcAvB2Xj;8bmE+WpD>g)?p;h9$ZtNXQpeA5VCh znn2h0NRu<={C~|xOs@Rs1^L98_Na!LoAW26q*o(wK#C%m;6!jzC&*%2;--8ETW=XX z?^l2ChA~rrN&Ut=kon9ug^$c%q?mjTp9Bj*FE7dr$wO__FwP34k<0}Ct4ZJU=4#)+ z5PxJ6%*|(-wHHEK=21^cDx3o&>ei5pruPR;Q@Tp8MTlu!fT9Hc80o3n>rqv2RnS7{wR=$@IP{iR zNoj&tfCBG!S4&6!`oKNsl<^!W%gQw=hsd|SqoYgDRe~qB4O_-*Rb3~A61*S2FgX3E z`K9uJ3g`yigu+EnAo<|VZTFVzUY1E$BganOGkNin_wmAuj1u0t-cn?nlmCu>Zxv;f z>_TxM_4G2aub?NC;}yLeR}^bvE;v8iGg`E=@LQnuwG9|eXlQ%e!< zEI+;RWZrh0Y6xdVW%GlyFK1k9S`yl}(dTA%SRCUQSWpQii$%sVf@=PntRq(o36+-8!n;?3KQdoYlO-ba)dv%gI3mJ@n6>)H8v|&P zHlw3-2{%iZ+3_(ybdesv|LfzEHq~xRvMT*^xH!#;xX|szVKll~K>Vx9sjKH0Za*_Y zs^r#x#qta?xYSAe)@x^UE1{359`Q)n;Co(JV%U8P0^p_ii2wj zR+7ElTm{vkzGOmBs8N#Tcz^>azFsb2;aR@8zm<1UmP>B`?YAy678~26#le#4jS7L6 zKfO2;7`5H+52`DTnZ$t3>N*ra?5(TzLd^wdk&#%JUFlHc(SBjgL5F z{0%*1rw;(CHR)-ZT2VMXs5VnwpOU}kl2&~_`TCzXpM+$+c{$X|ANgmkyh)gV2;won0pwun+#b;3fv6Rs^`A7ecvRZFiSM&$`YjHfk0|O<|OrZ@ajyhcS#nuI40M1 z%8DVubLX+oB_}M%e17& zrjnV0L6(#alHlTYQX>S71|)VrI)R1Iqp4^#Ej6BBe>?F%BFvnb!@t)^K%~aog&_+u%lFhv$8nybP3{rS3`po6L^yJ+`!j- zMwiAl=s8OO=Zc(G=E>s@S=w5~4CYK`Aqfu159Xc7zX%Ge16C9hlDXSg4jg((-CEpV zw#PzjnvuyA{QBX%*&EhTz#C2BviTK$!BfjxhqumG;W5%>JMKrXY5%uY*&=e4QVoYw z=) zzUD||MEjd4cO(a%Z|Hw%768Kk752|^goN`JFJJl@hL6BNNflkl5g7dx-Vc@gm13eg zh0@z&Jv4V8H-Giac0$)GCn`nlQ6)rEL38~#)i8el- 
znmPiGIq$a$sU8ge@G&^e9QI~hXaahwZdhUZ0mi!70C#p~fBOhza5{ElL`B$^Fms|> z<}g(q0?6WHpcj!`_kS`WB}jsGYI}yxvjVKa@lK*s^Lyjne4*^CP-L?9A}*6gpeobM zNU)?@mIog99_--jcVmP|bfwQ#(eSAv) zYc%Oc4PssQd_Ba&@OFw$&so2S3xg`=b~>>QuX`)4Jf*!H*P4&hlL@Avz8SehmdcfG z;PvDxv|FKR*VZ;a;p@71JLy}&!+Bn|YtY)R@R6W{jMyDW6!ApTsQq%TBn>{bqwn{i z#`fLn%ie7#P7#4z(L1MtgsG0bCcCGZ6ZqZtGXKslA|U z1|gue+rZg(U34zS-{~gE))xn)j2^!AUQAGb0^ainTAcWxwVuPS z+hKwF95+)GD5ArysNns7b2UiudG8zOp|U&;cZz-fK9JphxaL zqSdcc$TC14Jsc(pu0$mCZj{gx$lSRR=7?sE0ItEPS^MHe_x~C(+K&I7nz~<6en4Px z?tv=}7K9IPkoU~q!mHt(%h+Zk{pyc;mR_Z`zh*|RaRkipzjI`^Kpjs{v|=FW`DS#z zP%JaJlzC=ULz!ne(hQ$6?v>25{GQA2qRO!&=L4^G`{EOSZ40$y;+)mNU;!`kP=X{M zy=)vq;coQaP1es>hr8vJ_m_XL-p|uDwfuTg+^;D^GOhO7%5c@>;q7r2Tlge~W(Gdh ztQ3w8>rg(q$~sNcj8~jZE=Br1e5Xh>Qqzl|yuV+;w>3}t@0ICB$DiM_tTUHdDzAvK z23}qrl4W>Wf%TRn9=OwoIGE?$w83sDuw8_52MU6b>5_c4@g}d2+fwQnx3S{i+0gu- zm?aAk&hSo_6!4ym=0RGvL9bSSterbqSF>mH^DtM}?owi|7XmlE%Z7)E{NB4OPhLn_ zu#3?s{=1*G=7)Hy9!4e20optr_A+@MMVP7H1GDP;RqD4om)R*5uxHn>Szw>|bxe5Y zPqU&~O<_NAUa#gJ4#6&x(xE7$##6c`-+lJj>033-mznW1{;+l+nQ z`HmN^9MCi`gJGvRdP`C+`OUnbhig|gI9W%`q8Wmjl|Rrwc46@1ERgDha;(BOuuFMKVXYk)+SP8 z0er8|Og;=l#@2h9EbySSp8d&(I8E0Equ$)?Clg=6M#`^@M378YN@^vWN42ypbxGXG zVOthp<5@Yd$@%bjOimA`w1jdTD!}Q1;thYMK>;vl4!AIMnM#!&-ZdAB?7cFnQy)yf z&fI$(YGt)G6>Kf(<$}v*uE|nk!HwtCVO_JDVinVnu2fx6^TO;*G${)%qcQvC5#iJ; zQ&(A8k)GByXLFkTJR)%ucVP`9fP#Pe(dbiFxEh#wYbnsC&;O>%4-OCPa58Wyqup!h zHKzK>S;rP?!NeO$ZT4WOv6QcR{G4vAo`1LQ1l0U>tIL{BTyCL+XVQ{Bg9ChKeVjfJ zx*JK`e8Y62VLI)fMk4ut?MU3RtK=8vu@n^}T1%^%)z2d}*en@@(<$abTNsbr2W z*fA!knT~4guo$J((Z)*(f%NW~*2i^myvZWxGo*qfN;ZE#;{eJ)(&|8)jitLe|3rWo zx$+SI2(>~D7H46~tO4{VfI$Lb351r_3Dv<*=s|IOxLW#&rtsSkV&-0!w+7auG@jdPmk<0gdelR^~kad?&Oxtm$^kF2aCA@>h4}lruC$Rqw9j5s{=Gg#~aSnr0 zqrkFGfE$?R;GaC@!{|~4yt;4~#xw@>|LU7rjwD4wCb8-bh)Vg=!lmBRo2E_jVBvrV zb$Vh6_(-1PV{!mSg_FS24FK2ZoB@{Im@f}=*9LH^GqaxIzFW;jiceLTSumZ0WXeR_ zP_(2RT|fwPW(m#h48R@6YPiN4Si2A_Kb;LBifz47}gP6 zAhm0Ebh0bh@4{&5VyuvmtEZgKSEP0K4UKw+HkdjCX)S|Jw7XG7$Fnmth!VAlRfZ?b 
zeiYn%m#=rm3kz(Q199FqCUj_gi7oKj1Ys_y`Ex*iD(szN%{Cjo`2GX$h*#Zyxszot zke;>jp*kTg&Slb1vb^g?tc_Z1>B6fIJ^Thd3h(X(+RL|09bb(9jBGM!reWTwlY+ml zTA#IdCTh2)UtybRpk^|Q0W}i#G}4B3s>p-3*Cy&1D0(-Qo?`3YCv4Dn`fC1F_6ST_ zcpTtIIow9Q#LUMee?_`JpbKJL^v4T5yvd=7>0bO_H&qgoXLY-e#z=&?V!*<#oAP9q z6Jk2v?(iFijEmGSjD^ZLIeBFs~zWEC#k31C&n#$IWOzttLf^Obw(C^ zO%!E0Jx9F^dvM84HF`YdBP9+~C56v+64K))eoOy?I#EyJ;YI3PV|R2~w$U=z zMtK`r@=UWb!G`V;?8OEEO|$IMDU}p!9_im>9Ur5XKdRgQ6GJf#-eb}4)mg=rJlc+1 zf_V-R10ho};6}EA+x=`!=SZT~1Wy_-)C5*t`k3S3_2_ja$QZ|LBpCLU1F>GU;}=He8u;kv}_{ug~UZZD<|^Ps$dB}MtIdNPFhw5f8u6m`X; z0Gai0#O38Uac9|~rVd;XS5lq&e8m0QF5$dZ*M_3Ojhf1Qtb`_IhC_P9>v|Nw)H!oU z%{ua{_7=k0KVeXhi?1aaUdF|Y?)noQOPFmKF`k6?gvfT5AkGYt^q(P4-3-tx zz@DkAqTHc_j_)Qi1??3V2r;sw&xX@SZUr2# z&lWlD*L%+Pcc*ILy9e}va8M)QwtE+#mLf}9iG>~9N?t8z7z{NBzNrB=wsvW@M-d4} zMn~|>BvEymFg#S`IK{44ZFU$b-S_y`XwW;}+(;i-Oq#fH zYRJ(C+0q*YPnUiNV>p?%G+4)G?5vuTe0z{ia@=1HnoR1k%$ld4os_N9A)1T7-!8j; zS2IZN{oZ0_W$`!q(%~@*w04J(gmU8P-~nWwrO_fgoKFt6(1mS1C3PlZnH8a7cOAaR z2I&(1g9uN-FL|9@)Gb#H?;GUs#ClLQ^;T}$!0!KJL*$jis1pn*zI+aS~-%sw1d3@4AZAu zVKsBibFTASsLS+oN%@7#sWQr?;uv6u23-0k@h7u6em4OKV;G~f0SY~XTvuD-MMTHc zHc`D;dHGfX#2{2o;31??I(wEvn z!lQGh1^i6}@>Uy*tGZE87UB-^cs)%uzoTrY4RuT3-(PE{;ahnkA!nJ#3wyo{UTA=W zQUjpg*k&u>*y~l=ht5j&ZKDM~g1%Q~qsf8R!wwSQ!W?MnVb$(OfVXaf!e)*R&@mrChn9U4 zuF;~8uIN0?Ki}@?uv!mwIrh>xd|f=5v`Khu^xn^{*b3*lM2u*&42arQ_;iFRie`8lc zS{ef1GHf#O9phoTrmvYI3l2r1QvRl;TnJ=oR2Y|{9qycDO*!dN~f5CryC)92`*{N$X2?P2}j zqf*x@X^414R#L8ML`WO{3?>omI{Y=`S;27(bI*R*+#L`UDR}hd{UyN}^!xGFx|d&f88k$)~605 z1@QrNe#0-Rn;s85lJ!?LRn^;i zQb}ytf@U5ThUlZ>lJh_@&__0l$EkRtd21m8&Yr}QRfJ-Cfj0AzbGajL_ZabC+iobHlhHB$W*G513$Hu|*8eyW#l`6?5o}}_rGnYU1lLdKv zppCq7o5OQImJD0}6;uS2;oW^hHFh(K))vQmZBI{J+-RS?bF4nsLyxsbY?27BR@UVpRH%qHnlbusPUCsPv6HiR_G&H#d^1VNuO>^8U$s6B1}6VA}?`x z+EYI4QFjg1VPXQjV=zLr1aXXiOU&pW3eqs<)LFqM4IVhx##EGt2VZ)AJ6f>pxlNon&x? 
zo4@n8;PrF+bzm-+E+rm|V6}uUC3QEE=--xgHd;XaM;&mF`+h=FRXYz$+u4a81w;}g z<;L`2WNv?OMfT+J-j%0?YU=AgZtj=3;0F_@*S1ZlV0$QX~$y}bI0TGZVA02viqI(FN zQt!eyaei=P)V=z#n$1X^y{MWpw!ToZA}8i%sIrV$R@vtZOd1RD%93)4(uYdmiZD86}-auji3QP58d-zmYw9Y60*lcHO z|5`^dULunjC)ymq>t4aO{;@6D2DATTJ3q0o)YZaH62#NREGDkgJ!o$sbSQ%(ZI=O` zxr5o2K3uPD&EX*_;r#i^ffBMw(sNnX2;)RFg7tO*O^5}7w0>xC6KgTJd{r3XGQN1W zb%3dZDV9_N4G<6ys6vVX&_agANnRV%U_Xo|ZF1c6$$o-9#v38vi}>Tu_}a)5D>_G& zb^YqR3XxRL(wFZD3|D@=-9c0ZVi~t=aFQtNdW#uHKZ!3MOT6f0|6Aw!S*>_(Ju!v` zn2^W?^H4s6l!5Vdg%vE1v!IznW)R}AqlAUI{@+o_+xK(fA0kViHO=uZ>j))>pSwfL zXuWP5)`Bf>w$eCkbW4obn%)$$I`u64#7`qVn_Q&Z?km9Pn+73SJqso_hvEBC z|BuZlk~G+&uGT#-oT;i1R~026V;OTybQ7ga>oBFs##nPpbxQ{9jVrNA>B!jr+;l$k!&Mq>`0 zQH#`Oi)!Nl`B{3#)9E6@heag~ZA&A&4eazlkT#vZP5S$#2c(@#NqYfOD5Z%Z*3?7d zP}dN<9WdP?DwF0)VK46p2t)f7dP2Bsn>1sl)TguDo;t+eR;8wI{dVxo!97lGlfhMk zoDdtGdx3V}S)S%oNtTU-k64rY9)#{}QzFK$ZuJ#Swb;5Pwc{?2;*@zn68$R2Ytw;3tEZ**U_SFC3 z#`@9MS53WUSX@p_3y1(~0{n0t!^lzdO3h z@!SK+x*IkY#vRVX^rpK-v&5Ijb(FHeaz3Zr-FKaVw=qP)pMigr9$$ml&j6tU@Kd-8 z_Q<8XCZXo|ipKBNwDi{q61X{dw~W!_OZse^g8Ai(pn^*8!aVi1<8PEpviZ1@ zg+vNxS^w?&{+IEhy6J1(a~;c=#9c`>P>^i~Sfsi)s>0VsZ0(#^{mQ2{KR8KM^)*%M zUhhT9!I;E*t|LVU6Vc6-UlM_roZCf zWj+@oMvuB)Gc5f&4GKW)<>=zt(_`yz|G;Xs2+Mk$pH}M*&O~WQ7>nz27I4 zwbeJg^?egAhi*eCYbeK}PuE=DnNnU?{3BRpPjI<5FtEZugn({fMbCeq?c0`5;A!+C z;9Zh|E1Lw9OPxCKh&l5yJ!5oOoj$)b*yCX!}=I!)Mz9IRAWpjrt za7Jw36BiD9wo!R3Y#*d70AH2fKz0%%H4op+yu9bYb@c zKysn;SdS}imMSk+RN~@|M)|u34ZB+U6Gftp7S((x(pA6j>Oh)N>gR#jd1YRvWZi_2 zX+1qTR*5qF1=&$*lX#XZs{5{8>|n|JoG7>KiA)v&UWHABJ=;73#J5AQXg0nxv7h>D zVs}^aPLkSIh17gGY3|+FgWBJ11+El$^qiy+0bi#S%d!3|AMFBPl{}w0hIY68$5zNX z81{a$RnOz`2V2ZiM}FNS6V}J|E|B8`oL&;A?UFVGyBC|2gkPCG|EL+J;;bUH4oYKB z8c=h-GF^V2z0mAbLR{I^1kE=*_l#$!vlV{V0~pqWiV<#pE1=b;V*&6PK~c zk1;C?tza=wRv>p*+j%LJzAFuwF?7L5;2iwhvhl}t&SJ5K2|nq=WmCp8Kc+zWYgb^B z?%C9lMu;dAKFktEZY|l3HydfDLhDI|X&Rv~Z*kQQ*K9oC*>|@OdMeU-2`9gZfG`6` zp*pM{Ja01sl66{VA}KK@%S-v8ZGK7+Q%!3EC#r+DrZyEgkFs$V7=sdnlITkM9V&X;`O0d{AeqZQqVAy!AsRki)t=};_ 
z`y~?Fg9WsLT4Pzf2w~K%G!r_V-PmiFAG*_QS``fa$F>H$x7UKE#75%X%!Jm)aTn=| z8L>>W{MDcnx{%7jdxQN6PmW52xKWunubJsF@s2>4H;UjhiV7v8n z4Z@q6sdd?57AJDtRe;We2~SpiBu-CkUBZ~|w7b{IgX1hP)k9p)PQ3)>3%V)C@y|iA z59SCssg( z7UwjL;GEGBe(~@tRAO>&nlczEc-lrU>81_GxYVz8uV>*;P!511U4Hld;VanF0p0fD z;QZ^*%d@Dj-tP}<`8C>;!1y=VI?Hh=0so8}(GBOkFAt1rxMF{G>o1t3dj=X zYJ@r`fNBa{jCr1R`h^|vwbh)Ox32#-+jma1gAb$S1Fzpy9PHzhT*-2 zAV|@U)Y+$!uO%q3B%bF~7wM9Rte`!vXKDuK1QXP?D#mIb~S*UU8T=*opVzv2z1RFI#V(wx2+%!FVF6p{mJ&p||~*(9ide zZNd~9vJJY>S%O~r(v*6!{3egbSxIw$Y(6t(Xy;~|ik#eEs|3euCl-7VIo$itLTBi# zK5hwdtUxpxjY;z9D)80r}N6*9GT22a# zp^Qjt@qOFJ284%pxvAY<#2Z`vm6z7_#2>xpJO_U1V-(-*Eaq;ooZX_|hV)-J6xXAEw<3yT{ibq&1*Ep?!!gax5Re{KOghLNm+)6ed6 zyl2XPd!IVJk&Ox2-u|)8z`9+QU0xEH$cI`Kx5(>f=l^3XxI}=ty$Ap3GVqqZ#UNw5 zvBcHQyCWO^@+PW%tvz2nxpasH-;jKkE4#k*^AyvVHePg&5WVjF{G-!L+a%lZ-T^T^=_FX-OO;J< zSAV>)hp&uf9mW5c8flvYd>I-0kcM4&r5lrfnyd<`Ty69z%58#PUbPJP#}@MUz^cy- zklJLPrC7#GlSOPVU$u}|iw&Rjt(#i6nd9zNK0Md^T@_5-9N69XJ;PCMOly^#ce|-FHr~`_!xYZY5XppYlWrW=;fsw z+9ZGBsl+a*f1gZ0ZhgZ~A^eH=4|3BcKTzNKQaLq$hRgOu;Pj2HZ%c)%UfbIQG|kBR zSM&XYjF@mKPtL_JD(wjE0KU7GonpQ2WAEOapcW$+ahnQdF)^b4#Xa)hTM)xW`j0XG zFBgUFYZda#lmEg@ak}B7;R3X!^$rU4Y-_ImFjFFclyl~n`oJ@Xog}uZ=U}}ntW$5aNRytd$y9_VvSgHZA$l{MfS zRO<0g|3mGEb{{GKhjtRJHFXbpkp3+W#&*VM;3>}OBvafeu4 zoc0-~&W*XXh6#1*EKzN~EzgSO1i(()c}ZSKCZyQYUwlUMIv%E-A5tp!e#U?GlIOdt z(^Yyry92;>*)mQHh;}1Q+l)RLW~q3JTICFW^c0>ZI^6IIQnwqtyZ@v|zfau1?I3d3 zrE#JbmIzh&wNXIxhRO!tD-gGkl34;4&ogam=qt9SJ&s|q2Oz9o?6lOqs*NwhKrB` zqjNw(=?io;sE&$*i{ONr>N>v?6P65v%LjI7uMn?F6{KN){ax)r1ic_E*hO%|Pi}YN zIiXT3aKUE8qk2JmpGvmM_*=Lb=5L9Vt6fbds^ECoD>?KD)pSI)^a7JV; z@QZ`!s5~wJuGYBj<9Bv3thRV~uC%Uk_Ys9+LZz~w0ednIaoo~crrzCH-`V4aZ z39(t0ZN>1oLod> z)q36|o)m5UKy%ARn-HG6b8w5H=W$RU4Lt=QZiWTgE+!pSlDLSNJsTqeZL2VwD7=^X zON`b0GmqBO(@L}t3`tr_>nL|wrlahV(n5>NEh#ZTK$>tU=Hs~ zHEY&6Ag-ewFVbvA8*e8c8xHvX~}3PtS3krR=}LW%&3NGN{24g`R~2bEMkE_@=Hc;Zod42Fx8v&9yP< zNo*3mJm?F`nLWn`#{BLYurqElFVTitho&q$Fx8tKdoc%GXfZFH@4tA9o8LJJ3!Zy_ zo%H(8qss1~-I5miP^5pD!#o&sLr!!aq6&4i5yM*YW91iub$-`)f|jUpNZ)J;YYKM} 
z@FLBf^u!7LY?hVAz6sDB(CIK6>;9ai%#kgd)?6d@=(6GcL0US!JeazTwtmtLgizR>2G zoSx_C2s7J#N^AF;<*y#wd_JGs?sSuU1V>rl<8KxJlg)og+%0auaQ6yO39b;w4?jSa z)+A5Sdj~v7Ef5Mvw7R072|c>W(aF7H%_gNG)TNnpU!~c5%EH2rh=25SB%Bj~US{fl z1*|UA*S?WbY5#umlPjAI_r0D64~LA?os!=%Z_&8gphDy8G12o*f+9Za8*a)$TJMT0 z)#dJ5Ye}VETZgV--{H=K9+DGGZb}Q<6#qSU+tj1^jbgLzY2}3;5sGTR-It{#SJSke z3Ff(_?%5+?QpQeDz!%h=j%1W9}(M7H}m%}~3HxntVF5~Ht(_MN)vqQLz{T>r?F^k>*T zIP26MhNg#wVfrpq=e3bribYAWt)R(<1B6apo@B~#;Q_hA4o0x`s1aDYKa?8_8u#{J z`>cZ&4_plkMJt6%jpB6CZ7|fW6Bq>OLbRq^)&9}+z4dJkRBz-2@DQKeeyAqwb1}y% zX*m1rfptPSSf6?Q0GR7G@+NI2qlK9egsw!oJxDRu18m=&p-Nfa+~AQy<$yUZ`0C{# zbnL!3#Hj=QQxS&4{$o>sfmga=iINi_snN^>UnvTDjaHZx;6Q~sh-GohKS=LA=o@-% z7}@9Td$f|RLff)h?)s%~bO_OH*9Vk!QaW?b)dHIopjiz~%eqxxHOf>gUMW>VoR3qD zF-E#P3pBd=rDEpOW9-T;1THBU_Ep6i`(YudOiQyGvHlHz-SwI*hmlV0+K@2dBQg{ z?E0<{9;WZL&DLl|4pFI?gyFW3P{%TWI>O)?K6!YWd-3 zae`ng+m6NpMk2r%9*sH0yhJjKItX#k6pG2Ph`&=G=cm!ttOA5uKD4|d#m4)+RU!Bo z#U1->;x9V3k!PwDJnwM2LrfrZ|4*!%Ix|-0uEG{OK_S%t95@l{!zLy?+=9HX=o{`{jmyROe`h^d(q#7T1FGeNpe>A9_pTqE?RgY*xdJXl8d=J%?XV z>}5x`rhq(?g@O;QTgO!2$}eLkgWA(`g$_Tcb0H*=UpJO40vlT1TnxrJmG$GiOV6K) zcpQKgILx^O=E+3;v439wkW2p0(X?Rw;xq(~_FwiuVhY zX%GLgjTC(#PJ%!x2A*xYHFSrz`_%#QCc%k~VUj8Xv&^g%>Il{l;VzFYBhH$3T)aHo z81QGFjQQ-|VLLW=-(k(cl{;agUiG(etxH`jBI`=4RP;Z$(~_#wCiEn*fc2o+xxN!= zeu%=gtJ6Bf-(4!(u|o?J7+}-(uql8uAQXRe6+CbPQC1rt_J()BE!4{gv(;-hAMQ0< z133%sEZLT(Fws2^46v}{dr``8~m`!pOYZT{;o?1Xg39Wp#th^Y6m!^p)? 
zsXdegp+Rz9=p>;n^#z)e z`~Y|wl63uO0OYNy`ZSkB;JCb3Rv-HEz6Bv+aY`y3o=sUih@+Ru?*0MP3IQILF#Ell zCc1KRMh94S3fe64AgniR?=4RQ7pn z5=#&uZax^P4Q2Gj=V;xqNqW-I5S*zX!mgxxf~?&Q9yMS#mDQG^2|1-^!;MG zU}x5*ek*%gf&#*SpISgP(E$&*s!jnlUO^w!U~g1FQPOC^%U* z#lVuFv1U;_m_Zr>?xsFW_l6lHNGI$KED$auTK?o3pC>lw)@E0*2t~y?Aokv;d9_Pw zH)v&O30)PTOiRqBelzh|Wf+R6RLnqYDT)mKDX&kTQFrLS-{_BaPM{8XKEiLW389I^ z`C5ac!e|CQ#aNvnPOQU7wvQOTR&lvgYTzUDv=@^I9C2;3WVn1PSWjWO4%RfUE(-iMPbX8F9k69 z@Kt;1!L0c|TLJnO$kG~Gr?`=N5&xN~h1SnyDdV7L>|U+W-4So*P=?a~vXdrdq~$rN zk+g~>2A$zs?u)x4-m`_iu*fNj(FA49IS4`h2|Czk+i z+Suguq-Qwi2=-1vn<|-qR_5aOb%@3qyoOLDihY>x zMchh=j9i!RNq3DcG;jAf1c@!DIUW?&uuffK7>4(V4z;7}QJP@EU(zc`2zqf;TE{1q zVl1-A%M~H@>dVXY2b!-HpXW_!tn#xBsTqr9I8_BBoJU`9(lg+r_Zw?}f0Et>Mev5k9^n`&OJXpE;fjb0a%Ymjf7}p@ttHAQTp%1Q z*($dwpIez#8Ar*}Q#<^ONoa9u{JvlLfuGd-*D`Uz6A`BDh zUuoI#VIC!(^+JJrx8FjCo?dw%5Rubl^h0lh8b^aqb5XG1?>*(_vN_BwrK%6f6EIw# zMo~rz|JazSih{ZkENca_jT4) zZ!jS3#)iM|QP>J7?El!3MF*R+JT9etUex6|wGw`{vLcQ+{eW*}Krr=Osltt|vOJTK zh`DUp?J_58#2dpzXpZm6yOmAu##h_21`+3z8PD#3#~tthm$BXOqs*JMiC1WnLE;c*3<;Oh7Di7MUc^rOp2Kahgc{!izIj6TuC%#C!_~>`r@w7ue;G|kB}Kobjb_} zyo8@ZqxQiI?ociWx+>VBuVU`+S)Rtie{AwF&4g{W{TOS-!}V<^b!jL2L-~#=g$wMDVWK?<{O!+z<#~mG9>2HzXnr8cs5>3sqIGRaH4x zoPSf$fPzqWGI0{gi{zos@;7~_`>n9#Dn_2$hf3JoZ%8MKTvS!l{Q17-!&5zkHb)%A zA1=UDgio1kEjMI6hK5^$`67!0a=dL zJmqB2`RIvSO7hCz$YgHcit6Onp)snt%zh#vpvP1q_+DG&PeEsk)C(lg#*#J*JNSP)ELs z=YC8lLW?gQ_6DjB*GQ%wrw=;lOFvDz`?Fa-zsK86`*>BAj^RkJHpgOEq3rox{{yAM z(zVp(Kx_V);N!O^PJO>ECGO+ezQiG-U(Bs_eN35oOEy-WPhy}w3^4yt0iC~sJ@+#; zG}mfsYSwf12mp?t0+c$fJ5?j6=Cg^C)rW{-#}ige4t^V>&`QgXX6A97^{b7rCCzBAK8kTW)dZhmyjh(2_Us=iYBM z%L8+>4{RJS%RRGr!=U=-a|&L3p}pAbw_81I)(xIy0b)<}-^S|`qII+01WrF?HlSKCwh^~{9UBUO&^6P2)PB80VJq4FB*Ms>m%MBOB`yS{}K*~xIu}J$4{tbwo!b+uO5V% z1Nl&@uKs6+`WM`be{6pef;A3pnRn^|Us!BG+^Ss^2>E3uFQtmeIY@B$Imd(gI))6& zb-Y=GuH2DsBPlgfs1wjq6B1|aF5}xE70T7m9U_*U=-Xo z1MHP;xmgxP+deCZ%}|}1g)l$vnKAk6yM@A)hMUdkXS;1OY%)S{Mf}87A-E=(`O|R^gmdAU$iSxfMy>xwpp7B^Ed+&4Fy&ZaGiJZ?Vm6Tru8(EG* zg#u>zUM2aZDKZ*rAdqT5UE-9r9w>^9*MRWZ0qthTru 
zc3-{6gaoA;1b>rJ9#ThFUry{?_0M(Xj1UiVW%vAJa_KNNl|J2&edw>o%t_4QzQGufAiHPx|GpS{_9xPmKd$7+}^%BbulaCCl1M%-!*I|shaA0 zNy1Mewa2*-al+^cA*P(L+Vewrw18$^^Mb(I`pBG^O3vACoL(; zn)CnI{Dz8ICuy;u1)nvSd*QIA7V&1zi>AetN&(T(ZaOk+9kOh_oMIThIb}iNZ01QF zZwYlc%*I^=@v95j=?U@+7}0d5>yz_QZ9#V_k~XNVa%_8$9$)`MYUfwGF zo&0xFoP9SH6aq^Pg{<5&{`L}c9ce$>u_wCD>pVRa@Jy?^pgAju8SC+TkWET5nY>pyA$UZpE3E3-zvJTnVdy{#}NcJA*SRtHa9EUUBzx(rj{QkWkoO53H zdR^D^dXCnV=jc6lSCddY1>fycrCy;m{t8yp_ROFJzbfaplpB_1vkcCXT&YI-?C-p! z|G5ygA9>whX+2hrtEn8GbFW|7jBb8ZN$mvHO*RnZg1d10aRW3CDlm^c{kdZ4158U8 z@dcow^B7uoq!Hf>aX27CNu}K!z+0BK4|GPn;fJEpq z+d~6STZS%LD zUL;)$n$HRUyqJzWi)t4v>gEBQ+6B3XXIELU-D%~+-?R>7_|<<{NcUsL`A-yCduS#?LII>zole*ZBAYNQBCd+HF1o=aSmt(4C{m*G#+8Wc?8o9e- zxvw}fKmUxJ@_BQMofM$a0h}&>s1kPkpBTnh2jeii(K#c8r9WFOlj|#?*Aa_EK?ut! z<1}$o_!@#KB$~`=G1QZw!Hc~OxWdCr@Zf%Xc5H+gd}MGY=Ho^J`Ovi+3iF?ZwDcqU z^KK)wCjXPc=zspK@wMQfG@E#Fe$XoXC4a`Jnrgj*Oye#WJ_=(X9%=$B*avTv zItUXSG^T#r!V{2PWADn+|H}W4>e4}+@B=I3(f48Pbe>2@>)HCEoq5|-kZ*|wE0(@t zw|(@}stsr@io5%K3kaE>`!}^dFS(fwSO<0AerwLDMDWZk!Vlq8t^T9v`s`}My^ewG zRNXyKCiCMTPmN9cM|X5kMYuXj(_oc0FqK}c5CWQ`ZoRJ3?SARo%fQd6R<0Rym_hl! 
zuOB%*Uv+BzaTme8{7_{s?QZ#;vc@*94YP#fnUdr*!otJPq?p{8bB|Z-_AL~@A7i5f z0`A68Fm%qDp}gV*8Pp7m2QfILVYo=r49n7^389WGR;FJpD&MOI&xOPG#~{UrHK=U8 z#)@Qy-ZD3f2hVLzXHhX$wVQGpPg(SLPdrc2`Ex{VG>B=BzRb=R`R?0*71GP`suo|s ze-tO9RFrXJkC2Y?rVW48jLV4{7?0ViR|VQARVAsrkQ(tK%+aE!BWR%R*`%_qlW&sj zT1yV32F)08=(qMykPfOEBocL3uL;=W+)lvHfa=YcmtBv|k2Y%1DN*I#n%L6nr(H~1 zYW&4Z9m$Pt&LJ>UXCU^WW}XW6?#$(qj!;V^;+<&?-vdcD*wywbF`UyBr&iqgz+car zGu6kJ&bdVomY;q&P2aM(!NlLsN3f8-R>_nQq&f4sO*%2(`>%F@k>t{+ncv@zaPKg) z5hxok7W{~5wq`2Ys_xbIrS@m9UtYVbO_f1EZqoW=KTn*fh&M%haq4AcX?NIIRz5o@ zU(3*Aql4zBw9TJHn&|hU+ZdUMI^E4JvHm3MW2ZbX$$K**xhu^L4iBydiwWd@C&2!w zY?nH^m$=L9S>W3JuJUV$t=(KP#5o-`NWX{h5dzCh%6bkjgO?|YF2@K`bLpOlXm>@O&;;7ZD0{4<_=4?n?y@BmUL zbAn$&wqL6v!uYd1WI`&A6wt>?IO98X=-Z+uUq!=`x?HoIGttH@cv-7ye}v%frB3b# z;ijxjZu$E!a^vpU_B?;D$8V@hb0AoZ*8 z|A}|F&boBghM!c7#!Y}y?+r(1tDEx%Bile_Uud^vz;oZEiaAT3RY|hvmhyJQ zx;l8UTRoJfFJre@8|MWYpRSSSuQdH=C7eQn!Cr>C=RHpi&Ni!f`hB}pcrLAN+*Pn) z{NKq2V~q7y(|T=k|HST{W28wQ?yV$~L5}qxEq#>=`$dbiX{&<*WQ!B`0Pnit6%&}{ z71cQYZ%klv!n4M+w3Q{>aw&lGHQn%z>xCl?KQ;wnh&Apbi3l>4d@We9bIUk-FqAo= zg-{jBcp`c?^tbGD1B}*73@*M3A7N(`s`mEN!10ppFT(Z-?_0aVNY8br9d^E<7MD*D zcN`C25+arE;p$1lDC8TZmKY(LB)e2L(c_ZHsf`)juKBii z8Ei9er%Pq>Gfbp?C8dp1RMmSt|CL={4Q35F`*63}ci|T!4igm;BJxQ*_5R8hHz(dV zxgKJz}ta48B9aJCwc9A1X+)qOCibtXAE(e`y z&E1t)5#4~hUl6YrzoG=$_p}UEBrB6LSbp0+N!(wG+jVZ4+-?+fh!J;tNKvVrJMgB8 z_^d$q)>UPymtrKBrwvNtvIy%mvpOfo&6IEz=QiMpnoGNtIm+*A#Ihx!nk8~%*lGIV z3O~dBdO1@}$SK#p)h|k&t2JDwWm4kMN2G{X==!NdRnwll$)WYrbhMI3*D!z-Pkw*C z&D~s6JihKB)i<$Qz5}SKd7|pd+hNCObGsB>Zu0dRpPMJCY%@Ky){Bd(dznQs8{lQ{ zb3}so?rOdG*Mol<_w|*l_(3MEA=esG8u)zb5`vxRfL3@_y2nh$!@w$4$2sp=-M(d} z#cL}AX5ARiS%sSsX)l?>O!T>ns1qZQX`NwbCEzbtmOna*3d7h9b5j`T-vF$tyjOJu zWsM-cd?CVNz?5~ZG#Rwpyvm3FN>H@7S9&=ElUUv3IE4I1p_|s5cb6jT!i!9+wb5p= zf$Uqk=A?unM~75rivttbD-SLj0LDh1w||WdP@SZa|5&^Hkl~BZVA`XveFf|b{5P2R@r8SBk=)Q+{8XsBiVsEd)`0 zxUKnb3~L z^Hkp?-SeRHP%-jNI6(#9KV*Ok34y#jnBBU$M?L*uv{3a^4Q40#3u_XjX0+!jQ5{Qz zq-TeM*TIqP91V1}LUzV30>2z{GFDSXi1)9(XT&;Sq{k7EwBGi$s3ws~<7koaGvg8W 
zTheJstxD$yS!Y803{-2IOGCxazI8h8o)Y(w-SYVV#jJB2@|t5?lAAny%?aW>9u<4y-tHdE$0+SI(1R;K z1ZEj#^qH>XSO4iiNK2P8{c)Ypfox69MEn(>%b_SPgC98ma>5Tfsm(p-(Dl>{%cH zEdm;R&A@B>R~25KC{&2tVRzC$#Zq=Z^kL_(SHDcy?h)7EgF2=40w5<*3`;${Z<$S? z9cXv1muNDmkO-M8MtZfD!Jn-$7>dLN(t znitz*j2Enn?(bO?!c9*5)g&&?9EFL4QG6_K?Qg{!%PdrXliEl!mA2PARvX>!;=~R8 zZ$;317~@{A>rfMneH}xK7ghxqD)o3)V%aA9_0t<7X=;gi0czb3fZ-sBWewl)91Hq~ zMUS+W&XW1;ErIQFhkNE#Ys<;E&%mX57|5VpZ4sS*F8bwS2X{{T0x;#q1);iAuq%)m zNi{Llm}-B!2*M+fmA1-6K3sP6>Mc$77|F2~ymX#7dW{;YT)sr~!0dcHa-fyyE_N#t zbIPrA6fIG?Ifs!VvdQoR@%6Ooh(zI*$!=~po~Z?UkLK0K-gIUyhW`d>71ccXuxT?y z_Kw;)(0ZYwLCH@~2^2Q{Ux4pWPe%q7uSxmQM31!opA!bhQ?L%c5KW?eY)t>x%*OQ( z*NUV|eSM;6E8}^@NaALQlfQhnb4o8)9Mj{7l~|JDb59t9WB>a|vZ---$IyT%N$iJ3I`Y@xApbclm|mwi@4rle4i{ zA*tUmF|e6pd@_~m*E?qGS%pux=xbftgyen<%=yKFtudGPMiivEsMmzbT2p}?}2fnkqGTU+g@%{Oo0;9%USMAUmQnSbs`49LS z$azHt*}Jc``Fry1iF3p4GuitZnbFN6f_? z^D-{<<5=-^=>4@=4SG`PnMGGfPt&-6P=&6Xfx^j8ebxsd@3ljE!r{&xxevBg6UER? zB^9p=x0)3m$S#SF+-dpza32lN5_u46gtTkJv!f-h9t~cS<6UKTGh~*Rbwe3=gkOpW zN6b;CWDz5$;p?T=Edz*z#i5GzqsUM2J~8*Irq4VAiB!qKuO-=?=^pXlP<=0(;M}7F zdKXHGw#38Mu5`0Juy==veG;iC7d@=<Ho{3_H21F0a4@oDII28J@gI*WPl>f%<09-ss9Cc}H0>$feI3)P$p zC*=~gtUBAGhWif$3_H_85w%aq066Mg=lAb-VKpYH2MvUG?zM|+s4!I*Itx#qiS38) zJC&gpon9k{--Ufp*;%Z%B|e(;H@qoD3*j|Po$CLi=bU`t>(w?bX}sFjw$7r@{Zd`V zol<|sexyGIZ@tnyC|spcaEoy6D^Q7?O5!YjHo1G?m2TYeC&5B@CiA`A!J_C*rUc3# zV|wJ<9SwtBodZ_@M}`rb)z0>>PQz81YKV=UOY;vs=KV@L`?h;=5pwcHbCNkhqcqQ6 z`0R1AzWO~wtros!{jG(_4s!di=w-b-Ku~mttip z?oYB+JJzr%lPG*!kZ@<-YWHn?ox?{TP5v9gn|UQ=_|-+^kM@#mAWeEi*m`64(~A40 zlZh9P!PAby$Q(YststeLRdqKYvdW(A&Aj|i3sIj~w#4CJ!AXP|<&%~y6l?za%MZIA z)cTh*rlI+@^e%8-f#n%`V{4CY6WGFb`|g?S?dhG=!xJS`=rw&>rg`f6Du0^vpDRS$ z6N`~F#;>h6M=5QlIcBdka){odP?i_%=M9l!hCY>cS1g$`teueZzu~;;jJg{0${4q> z1~_(C+BrVIB9s!P7uf9W3sgeD-o0xur8y_jQ145 znYZTn*C8mU%Zo#h;$l2g5!5qIcp@rvWDP_G(&x_d|P2L2=a8;3`Hr}7t%2v6|H zK|}*U*tzoEa@StGX|^iEKV!;zV!o4+FaKHHL*%NdKQ z@2K)3-wY(^8ej;78?WkXyQdi!dRpFr`F4nLr$TjOtjVN0HF=b=AT)4>ECTnkTq=#} 
zme{lB*E@}O@L>5puN(jKhmdOL?_f`)X&lC?TX0Vb%lZW4v6vk~=;5$h@K|cS@1FiR zIAKF$p(bDL_6B0U2FUk}P-Db8|3_gvap;%m4tUpqC1Twp+nnbm$YU+MzFYqsm$uYS zYlJb1$?u4v1bzHty-iu$-OJ+6IiS@HhD`rVOg1sqFP8J0@@QBg8e`m%VBVhfJOq39 zJ*XMJ2H+boUf&I_Bs0VfV~p9u@08!#vT%|!LyVw~uEHj?@rMS-F;SKSM zumr>xm=t*=PZGm;KwC|uoHZ=T4MF|NHqN_VYW1~RVsgdK@#cF}=bsmw)V(`N%94}1 z-!Nm3&()2!Noml)a@g-RdK+9JHmP8}<^~uHiGDdHDKFHn>Iyb)YzyPK`>LtrVSQNM zz)6Bd1FWxyQ$rH43N${rPtbb1%HExAJqwjB(JHb;k{cU2Bq-Y-4(VyT^m1_nR!;yo zWxeE*A$bD~?469=H}J2u_#5_EmPdrD9m8k)_49l?HyzeLsRx}__u~ZWr#giQx1eg6 z?RdsCR>&P$F&0JDu^kp8irr73n@dX-@4OMoi%rvB5GQD3iQQP(41f^x5VXiQZaU1t z-575)kL}YBXNtD`ZaSG86b%0a0YZ?$bP!SF*7iC(ux{dW^g0KNy^%u6KR-dmEifE5 z0=i+=3vTW%MJF)}{{#3dSBYg_jfHSVi6 z1Rj5`iL;96Kb`)V$=j$N4rHA4-G-}Y_r zR!d6or;71+b_PSn&JPeVdeK19-Q?46>~gVIqB1>;lNN^yhh%HC zR*$2kde?Y~u2_mGs8|W20qBZ;8o$W%-s=Ap+UtyNr~Cb!UtN2YW1?3))%ac#7usa> zF!(`|jPWb`h#KNmEMpjvQZ&B%-hM!yKo;QFddI=JtzGm(f!QJAIfT#GvWH7e6b0Cn zYzhcqyats0NjhR5JQtUYm*@-W99g0q##5>Ph^5S&jk&+bS#`TCI!`@iS$S%g9&%5# z_TT-Yc{Z{b)Ew)Xi#gZDY#qhiB_k*Ol@C$13GZ%`dm-Er46yx&fHZI}d4Xs{$|cLV zYOu9oGLi8f8WJ3bq~wo8?~+fMf9zD$nkS8~i`53Xw?r+C^UWS1rT7-#s_=Ig`a5_Q zHn%y4_==3J_df9NMXsIJ0K#>5vMfFnP3Sn#Fb`j}%6ifSdT3&kf*3)-W+uxna+T;N zyII(G&8D)R0v;Pw#DOoT$r!Bf2R_)kMd9BleG`wu_`4F*CzKj7h(J-a`7sz$l~qv-K@o~}67Ht}J=tRVJf=`}IG5W&{~YrqWP zf`MW(;7pFBT*))BT!G==^UM2~?Dcb%urCxnZ__oUj)(HEA`fvj32Oe>TM!M}B`JZ9 zM`f<(I#l(7(##)qOgr*{Zp6s_XQwIQk!b9hx-J+?mpUF=w#+TCy|?~L)ONiNoXx#a zI@z6z=Ou{G6J7BsA=qrBf})t`@vX^!QaL{-2E!?S{uqA@kx!^m4DfTl$Qt!Fs&pM< zp16m6Lk?6y;6?Z|2b!?B;*BHLziW=ot(L`;BVi|Zgt2wRO{10lL%f>iJ!fjT_T`W4w9|dWCk9n@kba!az&TbNKqf9ZK~MvjCT{=)ejKqQQR@`mu8mT}>=# zfCR-W_R{j$A_Wm;Zi>PZ&AFZ`W3mG5p|RXm{91$Wb&HUgaa8f}65h&;ERrYMrRoLM z@?jhkp`aFf4|!sCa@BXT7HC;E1?leKU*#_#ORZLId6QXf2mVdD%U~PEUc+8$Dje|f z{IV%z8qQe{tPaj)D9rD{Y&GZqsj7)HRRExR?ur`)5?S%GK|%+I7FE7rObWP$;@luhSh>&D!63=&`)%SGK0zanoN(k481*??%@cC$42P>-IvMs4%l!IAgY#mlKp z)he}+AcY$nK)A8W3hV`bkGBM$UAF9QCY>n1fm!pge&vxTpqi!*>q9|RF@A_)?Z8{NWIP}m=+!CU$3Rpz{uDtd!6*#Qe7 
z6d&{8#Xj&l_M=6z5a5`T!%NGm78u{(D>}+nLpp`uHY19V(qYQv8IG)~y;_`8s39`Qw^0xo*T_5IAVU;h54c+YW^rMaL$BO+2|1rgK9TSCgZoYN2S zE{2ca0tRBRwfEAub*p^!t?!+C(~Cjhl7DN9_;k!LJW@by8pigs=$F_1G~G`p zxT4Q#OoAhr>SUZ5#%pMt1)rx5M;Wz{VqM-YNi~H`(y^pM;zvZ!bMfcZOXwq{;KCFd z6#I_M3w<@YGy7&TNeDX^nNn!rdilC;dh;Wrq!9ypJd9%a3)^_p3DPx-=(M z`6AVhwigc;;vUj8y~xvi{lLwtB7HP&DEXU*M2-2xmVSf9Jf9qq7CeIcOZ)o>vQ< z&9IZzns#n%dL1ls<};| zdaifz^ODjGLf9}{)_sLR;^2RFFfJb&d3z;_g8ffF*{zMXjSO$>T=&iKTwV~a`-xN? z7_Upfd#Ioke*nT;;kl9 zh0Str==;g?#8!tZYAi(wg!39H6*aecI#-gjUCd<^BRLf4Xs_swA{cv|)P4J(55m7g zO|X(hd$70i^q$*KG)k$BLSQcsVFLI!C=aQp&GiWVq(k&wFBLRo<~)JE@LG@%P2L5r zy1*|cJ;Ha;3&Pv0aSMAM>j4#9C&?Ztwy1ug3615sH=^2tDf_vdLhTAlOg7Z_7VvJEWRGGBRIA2cIGWrL-L=k>nIwvOpewoE>J_`+~SLb<_W9f`)3NJPjk7Bdxbk2g?n0?zCM0TPqh8?`rr|bmGm{emn^pHlzOdB!+24<$`D-M`Ilx0o zEDD!_n2ygj)@a_$56;?A(<+T$VDL6y8PcaN6sap+-cwMh|8=bb$v@f)vs^fTN3-8l zcUq}#_3ij)im_;ZmAaJabR&Xp6=J?>YqJK{^;L=J=bDZrw(yKi7r092t+-w}kvhml z*=fDR>h{yxX{^0(!>&x;LjtAobzD9ClRO4Fu(mLc>}HrLiPuiTKl%OiP3Vn{Dp&#W zHD1;WYrXa;LrP#jkGBUi+w5;A`r;4ASc!gO@mQgl#8xL(Mo!Tu{HBYlo)G&MS(=pE zw8jd6EbqwEc^a)gkRZ7p21)o)?yZ+=h9l+y)5M<{;~Or~znnO!?kiCNFWCwiV_v9k zlOsntYWhkrGn3$C&*L-rydg@MaZz;WHw9UYpYhVQ3Gp2^kllz$2J)XJuWa&zw2-?c z+ts-tXT%X;`(7upZn(spGB>L;F(CQ@0d`FYDed(v^mTi8hssxhfH+sHHX&t=nZH>_ z|5Wp}Z)AWIU}DZfiHiY=Bsr{2PXqj4S9we9St#f+DIaQADz1={ifi|bGAR4mdm=Y0 z_=&x=F`+#?u^sV%yp(sJ)BYY-Dk|nm{sfEce)`8Yvi}Itx@VFI#)55Tp`2B#%e(E> z4a|cpIVQj9BFI5Q3XaM(R;2bG=Z-O+-~Ukzox@o>wJVLz4ZQ-+af?2(18esIjX;PV zCLnKS13}-(Ukw=&Dk?bWw>`jr$}= zv*c%n&lhPv?bbbeY#7)g82b7|Vb+CZ&j-ZtAH~}>PQb#jqysa8^ZygM+|BUE>rC}P zS?_n1H(QCa{^u}4V+=oHtp+e&TQ7h&`pp6nQ}n>SL<@J(MX%5ho}b?Tq3ocp(Vcz> zuF={Vzf`Bh?|cshUX8C% zY|M7*ELmWq`#{`Ylk!%lV{X6bd<@^=A(rd)5qJhSfF1E4#UM<5HaVvAO|`8XXBErP zxrJ2Qf<=k(hQ*yb6Ls!rL;Im$Reme>=*1f*MH9XE?e>BrlA5e(V?3(J<8B{y7_Zg~ z_1J|4m77_&BuU-dxpgdN{AOFyHyF4Le)Y<);9Y=-pfS{S*q|%Azs%;rKLd3iP}2al z3e$VNJzX`{)y~iR83YIo=&94tNdmMr^< zO1t8`>`I9CS1KO`y%m<+;T$?dcCrzPTtIk0q$TBfE}?1uH-H?kw2WFPdV9Qx+Cn5a 
z(P#vna@oV0e!+mB#1C=?g?ICKz{dL*2X|tv`(?o0d2~tCGAZi^aT>u_(thjL2m$h> zK}J@hHTFrj3>I4S(=uMwL zxOxMP`GPs!(|?yr71MY z^$cih4B@Qa45T`}&T$-;^_YEd0J2fuD74UwnVffGRRdk8f5aDsyp)fIKei zo>$)sYrVArD%SLZ!NoT|`Cgb%uYIia2k`+z;YWb^L$;~!clUEN?OUylk;q4`)x4yb zKOCSBM`rE@`8N0n)2U7=v{F+xt=NT!$C*0>H|W3BJnP4;=1Jv$U_ z{TN<#n}J0i*H|>yo57)6C$Rs(d+%ep9~9$x3JEvkpEORMhRQ7IO#B;cs;acNWddA} z3iD{HeT=K$%F3t2P-f-HexP?jzUS_9JS_)YW>(j{N%yYIEk=KqZf|hvl;!jM-g22b zigzwceO=SaGl?)!c(-}YLZhkFUFqcy2c;uR^Lj^a(V8mz79Snjvnq-v@6d?0C9HgI zkW%)R`M!B%yKT$Da)Lmqd{YVjs>IWWvpe1o)uIJ|R1gixW4>WS!vULdg=)Rouaw52 z79MD?D3!x6iCfxi9l^4%W|$P)$dOD4=YPO&%FOg}@-UZH}+2eh~eJUZ~+ z2O5I>z9?i&r&fqZ+bit7kKQ>5sTvg6mORe`j5~nZ{67k2e5N_((k?z!>G$T@%AWxB zF(?7(Ukl(j7$dqxFg_1E73?JlPgey#{$hY*7x#)^<{U@g$AQC$x4&7sMqHOgQkbtq zrIa?E&r5v@&4aJlYQnb~J&wuQ6+-0Fv%jMhz@QyYU3ysshDm8O&7bdmadPjj$Cz!yIUuEZby zuc3vyU>%3Te|g-&Gyo(b9koG9v`sll=Z*UALPqd%0IoJ=lbG+vm(|1;<~wF8oLBOV zVD{}3qxD`!G@U?6*t7$`6X|~Ka;&iAjec|e)$y>^8duEIdPeE@(u0Qj23p_Vx4~E< zF;}YW6c!{E%2hQ%(zx&D=EI-}wPQm&Z6_3@eAC%v&((%m;Jq^QMzXcz+R8OC(t~Wf zFEsYX#ZSBx`8KD%b~+iIn`MXw?mo7!Df2F9DeSG#`ZG@ryiol8EE&|tKW5G2mL`TQffjNLZ2u~Av|NT zx@s`?%!55NtEF*{G`Gld&F3t=Zhv(CqYzT*xczs~H(a;bc*jJR<058% z_qpk-YW^kR={~aMGy@~*rOA54lPS9OIVINYIZ>hHzKX``o$7ne7e1;}y7sM=v+H*C z4tf{u1oIp^YBrvcI+Fq4)tUzgS!efJ+CM5^br^%At)|(HGZ-{A(zyd#cpJvv*x5Et zWvD-RBjCUhAW8T>6OKA(#lL(qn$K*$evR3tZvTnn@U=6c@6+9m-%*AZEo#SZRto_D z0k}}Qyy9s+S~e9>{JyGOhM1uO2c_Z+Dj#-Eiyw6I7iud1$ce44z8U;Zl|A`fWsYTA zj1@QQsp4SnTv)akwJgJiEB_o6Z?uVgG5r&{rtf_9#>rKmx66C31cj)2L;jhnRZevE&>6H`8hPbcfSs=HgP-2E|B9-i z#e>rl)GE(dKeZl)+|2}hF z)4ywt@V;a!fF?6rzJB0fe7Cy%4WGpw(R+K8NzrRrCsGR|ICBj6-%ha`k5t%BM+W~> zjP2-Ls%I!FAg4KK90h)iZ_%TC(O-U1DEPa)AN4nanijtuLDQD_mWPY#j>ZLb`Todu zZ=>#7L`Si|sP0V)6Oc-A=(nH<&ryWu+lY>bTWW=a7G%k}Zec+5VOHTL;>-Wp{KMxT zOam0L@bw1@J6+HSf#)hUq}^Y-+5L79g8We zhnJDqaW?J{`}q>EsT7$STu*mM>1R^+CR2Az-6Or`^ae3IWZ7=jHz8hE@+oMfx0KG4 zzCE-G4xT-8sry9@#AFGD2}$Y`zBL{AO?)^;^cA$U#9gEGx>LQV_$w8zlKM+1R*1{= zS?bO!ArFP^1CtB4N9XLEi~R}SKTpeYe-+mvbHBF9eP+@BRpI)s>So)a%Ab|7m`b@S 
zcdO3mXMg;@9(f)*TUJ$7%}EL6tTXL zv76K>1*lqrJwg4*8i7~E#K*JUT#tg#9)gyQwU!6q@$NR$?6-ZgYTtKaFjp>;tAaM+ zjfy`EcI^-MH;^B5=QEtWSe94@^7J*{7+6p?`!}_;@bY!$G)N0?rX9T$>5vWGu+=Fh z=n*Mm4g~qsgpj0X0X;;Y3LEkyoPv@FjO3w0D8UXy>m$dXBMSRiBaO{$)tuax~? z;!T&}SU;%)UIK_R4U7E$2mG(`|8d}Qnl^j5%Gu+Fb|MvOP-rL+eJ~#`@&EgzB#TlQ zpwnOjy%9TN-h;DuO8}wTbalW8TFAAgY6ENct-HZ@Jl#d$&9&Y)R38|OdWZfcy_ca) zV#{;2D7&2F6$G+3X$Ms@3ejcVB6v~#=;zaKS{tV%IhclVu1f*Un>{rZSseYS2YwM#x*254&V>$8%YJ-S5p3? zxZBbP#+v{q2J3farSIb1>$)>97Q19-Ez~|OrSxb1W3dfqYVQPJ#IB_w;R*5D%N4-U zuAvVKpYHVls_@wx_;btyb;!OnaEen~9dy*Rynw@hM5+U_hKjd_nSUE8vnQABy^4nC zSJLZljQ?;Sl0hVrS!e`LAKC!l>Yu5IM11i0{J#Z(&j0U_-&J!JmuaCzJ(2HSWXs$G z+fD>u_f;l6JPIX6ye9$7_}VLhMgev;N{Gw}{|Ltv04D+cmwV0-_B~MeBj4HB%S?*^ zc-~{pYK@Z(xs zExh;UjZkJOp(yp9)_IcUv1W zj$G7@g+2<|5Tr2(EcuyP5kx(+q=E2b*!=A9X_(82DQKtITXh-Nm6{{1!V8Ofnbfw8QjzxZy-{N!Qj3NX#JzWww$<`oEi_z7jWv#F3nY4m z9C6??Hq2Um$W9-_jY8)Bf{E;u$RWF{8`^)~Tx(`-6?7vWq?BzQ&MQ*0 zSGmGv%2=5+cZbB1!EqpZ84$WJw;AnJmzeob>MyM>d6O64W^PbxCTl0wj zGa+%z8<1!u_SGv6PWOe_tvIDi>K4UMIpK>jqXm>m9xpPrD1KJIBa5G9?V(*K*6sCJL250613w!-$jUHWLkWyixh^pC={f|50QDegHgwJ-43hM_lVSTRn1unShGl z;ZIEQG?<+q9E-3P?1SO`q2R3eg3?}Co_V%M4k`&2AjIb#cajc#@7@-m)D4)D{*MA2 zOO_vI|P~1I$*-wzzK+hY%c=cpma8Phzyx00X21dEZlRn$xSViwL zm=zc!c_+>ASW?*XL$i*Ta*-u2QV)OpOEkqko}f*%!d&b5k<1y>=~#-umv|j>plrx? 
zh$;CKP%mf+j?=*)`^KXK>_PEJDYRGQWiDwpWAWNxC5NJnt!-zKfza>j7gCwW>;7t4 zP177Zc~BKn3NX~RK$R3?d%x+)tC*a^jq5s|9aO#c@z8Z{qT||0~m5E zpQ3gaA4s4jz5`|#glE{*%n;Pd!~Ms)e@;W;RvayhIeMBhkGRZN)s?xw+k;3o8oZFg zb-`E|x9yUh<-Ue6zR4H;wordPL`h;R_T@MFZR!)YROy!OrD+Ra3=l1|O8T-B|3gC# z-?V4DvUvC$vQ$P#yTc*SnSf?m&4@EV9KH5U4W220$!iY5Y?- zmkp$&UxPP%YGCTG#V34&Nycxz>J6ZT5*mJ2ccMT=*qZZH9<#*$M?qu-0;s(D$Eo+p zk~8wg46BBy>^H5mauQOOR$d1v)!47!Ze0;z`?S^ zRS|80_f3r-J?H#@muL8kYq?g!C$!MwAlzU;w*skIgVueGM?)IGhKzXfqtT3ENr~SU z>TTjuE_!nw=(bZ|q-XPak8wJ<%mV`i71%>JGQ*Ub)i?ZHA7>^HGRmXrK(!y^ zPA7lkKm-^G8uJ`F{at6d^4HKBNNeSI3j%0W&ezG}y4bn?ERKpGbaeB}S>hx9uEefR zQ~EzfRCgmsYNf)FTd4|UaYhqt>46wV6z4ESW`e%J3PgOj$L|~Bc}o-{W`_2p#&# z>zlZsE^V{QPWnu~4E>(&=ey9#BEB^ z;VTT2LSBitm{5b87SYc+Gh?-6@572DS0DN0fc~`*IPl{e669!rJjo^!SqjmBv-P^( zoAZx-cPJmT@%7qR*S9Af#c8hZDccD>zHdzL^l{N+b0CLHwW=|2(0y2u6-E#QCGDw6~-Epq{ z@U6;>iKjt{@#0a&=}D{6{6^mc8O#>^8RxTc@NUu<4`|RE%=6>bT^RGD8U*+5BH+?# z>o#`X_e!m+?Q0{3om0`Z&{YrlHGRxp4$xPonKZKk=i6`J? zyxyhTE&C0+zE6Cv`1c zFsTE$M4;%^dsuK{r&Tc|OrGBzSyS!0xcW)r`L>P6zUYE8bch*}vLe)1=i9fD=MP}< z-+JnGV^R1>%-V)g8aiZ!@dZA`7rjQ`6XkiJG3O;HsMKq*bhGAjX11DXItMEGe$7vAjft$KK^@&al}=U| zaXk31SXwZaF%~LYhL`GJdlCP&pr%+2k@-rX^ zz7QI8=@2*p#0ex5c`*Uz2tu&gQ1oR0B~fq5KQZDMcBpp~I1B@6FnLmUBGFLSK}PcmXnQ z9SERn@jwCJDhcm$2)us^Vq+DzVP^<`@s{n$wr1-q-ur^Lm4z9;{5Dd6(i3R#UPy4m zM+D1lQhujI(Ox}%DC90+Nh$8_TDP=RZk~u`*zgq+ds5!plp{WGYfwS&0e9C4)>jj+ z*M3n?UhOeW0CZc|r^lqc0T)zcRq%GxH`tiU`3E)2o-2UMevg3ruY%BTbH><1M5Qe= z4*!ibIwzfmgB0!^EjiTik+M4tssXiAWHI#F6w%nh^VDWKYjJjqZ1oM)k*Wx+;Zx)# z&t(p57F;g{OwpprYJ%5KK^*g7{9fPoEDk7%?VEVin+BJ`U0LEi)1=DuS;G^*cG|NFJ!m=EvznAi+wn8B;2>Z8z}L6#neJE zBR>?o)uPcf(M?=_+2alS4_zhSWb}5(RUp#!H`I0CEB!VvCU`(O@yXailyehGzlpY$ z%!h|-O`Z8gdMq3k*XMNR>4@}Sy-p!*c3|gOKzM5L{U;>~0)qW!U8t@mxpAURT_(!Yh?cxsyX;(Tu z+mP1RkxOeE28vY$Sb%w4h8$!Jilgz*_TczC;y;3I@bAN39oO|OT1hSBw7&6-v0Y1| zgkoL@B@EI?jI38O9xyOE=kARs_LKNS;~&@`!CoKu)!`2Uc;`ja{vYV`u8*c`(b`_e za}Jvyl^~HMSGSNAIoeJG5uRwHg_UdB>=CT4e3_?wOYps?iGQ%QiOe&OL~Pqw9A>>o 
z;a;<=_|wK-9{6YBE460v2Dxu(X>oszvr0k2w5P8bug~j$+P}l?@nZX2e-Kz={{T_@ zQ6JhyfAo2H@_K&-cmDvg=f*8)EsLMDKg7#F z7~4;Gsa%!}L*Y_ Date: Wed, 14 Aug 2019 10:34:23 +0100 Subject: [PATCH 56/65] delete example.py (not used) --- .../transfer_learning/example.py | 100 ------------------ 1 file changed, 100 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/example.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/example.py b/src/traffic_analysis/d04_modelling/transfer_learning/example.py deleted file mode 100644 index 5f075ec..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/example.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding: utf-8 - -import xml.etree.ElementTree as ET -import os - -names_dict = {} -cnt = 0 -f = open('./voc_names.txt', 'r').readlines() -for line in f: - line = line.strip() - names_dict[line] = cnt - cnt += 1 - -voc_07 = '/data/VOCdevkit/VOC2007' -voc_12 = '/data/VOCdevkit/VOC2012' - -anno_path = [os.path.join(voc_07, 'Annotations'), os.path.join(voc_12, 'Annotations')] -img_path = [os.path.join(voc_07, 'JPEGImages'), os.path.join(voc_12, 'JPEGImages')] - -trainval_path = [os.path.join(voc_07, 'ImageSets/Main/trainval.txt'), - os.path.join(voc_12, 'ImageSets/Main/trainval.txt')] -test_path = [os.path.join(voc_07, 'ImageSets/Main/test.txt')] - - -def parse_xml(path): - tree = ET.parse(path) - img_name = path.split('/')[-1][:-4] - - height = tree.findtext("./size/height") - width = tree.findtext("./size/width") - - objects = [img_name, width, height] - - for obj in tree.findall('object'): - difficult = obj.find('difficult').text - if difficult == '1': - continue - name = obj.find('name').text - bbox = obj.find('bndbox') - xmin = bbox.find('xmin').text - ymin = bbox.find('ymin').text - xmax = bbox.find('xmax').text - ymax = bbox.find('ymax').text - - name = str(names_dict[name]) - objects.extend([name, xmin, ymin, xmax, ymax]) - if len(objects) > 1: - return objects - else: - return None - - 
-test_cnt = 0 - - -def gen_test_txt(txt_path): - global test_cnt - f = open(txt_path, 'w') - - for i, path in enumerate(test_path): - img_names = open(path, 'r').readlines() - for img_name in img_names: - img_name = img_name.strip() - xml_path = anno_path[i] + '/' + img_name + '.xml' - objects = parse_xml(xml_path) - if objects: - objects[0] = img_path[i] + '/' + img_name + '.jpg' - if os.path.exists(objects[0]): - objects.insert(0, str(test_cnt)) - test_cnt += 1 - objects = ' '.join(objects) + '\n' - f.write(objects) - f.close() - - -train_cnt = 0 - - -def gen_train_txt(txt_path): - global train_cnt - f = open(txt_path, 'w') - - for i, path in enumerate(trainval_path): - img_names = open(path, 'r').readlines() - for img_name in img_names: - img_name = img_name.strip() - xml_path = anno_path[i] + '/' + img_name + '.xml' - objects = parse_xml(xml_path) - if objects: - objects[0] = img_path[i] + '/' + img_name + '.jpg' - if os.path.exists(objects[0]): - objects.insert(0, str(train_cnt)) - train_cnt += 1 - objects = ' '.join(objects) + '\n' - f.write(objects) - f.close() - - -gen_train_txt('train.txt') -gen_test_txt('val.txt') \ No newline at end of file From 04016beef083a4fe10bd778a901983fefeb34750 Mon Sep 17 00:00:00 2001 From: Jack Hensley <36745629+jackattack1415@users.noreply.github.com> Date: Wed, 14 Aug 2019 10:34:39 +0100 Subject: [PATCH 57/65] delete args.py (not used) --- .../d04_modelling/transfer_learning/args.py | 88 ------------------- 1 file changed, 88 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/args.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/args.py b/src/traffic_analysis/d04_modelling/transfer_learning/args.py deleted file mode 100644 index f7d9cd9..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/args.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 -# This file contains the parameter used in train_tensorflow_model.py - -from __future__ import division, 
print_function - -from utils.misc_utils import parse_anchors, read_class_names -import math - -### Some paths -train_file = './data/my_data/train.txt' # The path of the training txt file. -val_file = './data/my_data/val.txt' # The path of the validation txt file. -restore_path = './data/darknet_weights/yolov3.ckpt' # The path of the weights to restore. -save_dir = './checkpoint/' # The directory of the weights to save. -log_dir = './data/logs/' # The directory to store the tensorboard log files. -progress_log_path = './data/progress.log' # The path to record the training progress. -anchor_path = './data/yolo_anchors.txt' # The path of the anchor txt file. -class_name_path = './data/coco.names' # The path of the class names. - -### Training releated numbers -batch_size = 6 -img_size = [416, 416] # Images will be resized to `img_size` and fed to the network, size format: [width, height] -letterbox_resize = True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. -total_epoches = 100 -train_evaluation_step = 100 # Evaluate on the training batch after some steps. -val_evaluation_epoch = 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. -save_epoch = 10 # Save the model after some epochs. -batch_norm_decay = 0.99 # decay in bn ops -weight_decay = 5e-4 # l2 weight decay -global_step = 0 # used when resuming training - -### tf.data parameters -num_threads = 10 # Number of threads for image processing used in tf.data pipeline. -prefetech_buffer = 5 # Prefetech_buffer used in tf.data pipeline. - -### Learning rate and optimizer -optimizer_name = 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] -save_optimizer = True # Whether to save the optimizer parameters into the checkpoint file. 
-learning_rate_init = 1e-4 -lr_type = 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] -lr_decay_epoch = 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. -lr_decay_factor = 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. -lr_lower_bound = 1e-6 # The minimum learning rate. -# only used in piecewise lr type -pw_boundaries = [30, 50] # epoch based boundaries -pw_values = [learning_rate_init, 3e-5, 1e-5] - -### Load and finetune -# Choose the parts you want to restore the weights. List form. -# restore_include: None, restore_exclude: None => restore the whole model -# restore_include: None, restore_exclude: scope => restore the whole model except `scope` -# restore_include: scope1, restore_exclude: scope2 => if scope1 contains scope2, restore scope1 and not restore scope2 (scope1 - scope2) -# choise 1: only restore the darknet body -# restore_include = ['yolov3/darknet53_body'] -# restore_exclude = None -# choise 2: restore all layers except the last 3 conv2d layers in 3 scale -restore_include = None -restore_exclude = ['yolov3/yolov3_head/Conv_14', 'yolov3/yolov3_head/Conv_6', 'yolov3/yolov3_head/Conv_22'] -# Choose the parts you want to finetune. List form. -# Set to None to train the whole model. -update_part = ['yolov3/yolov3_head'] - -### other training strategies -multi_scale_train = True # Whether to apply multi-scale training strategy. Image size varies from [320, 320] to [640, 640] by default. -use_label_smooth = True # Whether to use class label smoothing strategy. -use_focal_loss = True # Whether to apply focal loss on the conf loss. -use_mix_up = True # Whether to use mix up data augmentation strategy. -use_warm_up = True # whether to use warm up strategy to prevent from gradient exploding. -warm_up_epoch = 3 # Warm up training epoches. Set to a larger value if gradient explodes. 
- -### some constants in validation -# nms -nms_threshold = 0.45 # iou threshold in nms operation -score_threshold = 0.01 # threshold of the probability of the classes in nms operation, i.e. score = pred_confs * pred_probs. set lower for higher recall. -nms_topk = 150 # keep at most nms_topk outputs after nms -# mAP eval -eval_threshold = 0.5 # the iou threshold applied in mAP evaluation -use_voc_07_metric = False # whether to use voc 2007 evaluation metric, i.e. the 11-point metric - -### parse some params -anchors = parse_anchors(anchor_path) -classes = read_class_names(class_name_path) -class_num = len(classes) -train_img_cnt = len(open(train_file, 'r').readlines()) -val_img_cnt = len(open(val_file, 'r').readlines()) -train_batch_num = int(math.ceil(float(train_img_cnt) / batch_size)) - -lr_decay_freq = int(train_batch_num * lr_decay_epoch) -pw_boundaries = [float(i) * train_batch_num + global_step for i in pw_boundaries] \ No newline at end of file From 7076e92c1ccce5c6197786405a1d459994042856 Mon Sep 17 00:00:00 2001 From: Jack Hensley <36745629+jackattack1415@users.noreply.github.com> Date: Wed, 14 Aug 2019 10:35:14 +0100 Subject: [PATCH 58/65] delete test.py (not used) --- .../d04_modelling/transfer_learning/test.py | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/test.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test.py b/src/traffic_analysis/d04_modelling/transfer_learning/test.py deleted file mode 100644 index 5d81ac6..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test.py +++ /dev/null @@ -1,6 +0,0 @@ -from traffic_analysis.d00_utils.get_project_directory import get_project_directory - -print(get_project_directory()) -print(str(1).zfill(5)) - -b = {'a': 5, 'd': 6} \ No newline at end of file From ba4a9d82b590c4d5f377f9526aa5a01348a4d9af Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 19 Aug 2019 17:44:15 +0100 Subject: [PATCH 59/65] added 
version to requirements --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a8d30f8..f76305f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,4 +20,4 @@ sqlalchemy pandas==0.24.2 psycopg2 seaborn>=0.9 -tqdm \ No newline at end of file +tqdm==4.33.0 \ No newline at end of file From d663fc3316a8bd1a9c11b185e1bcdee570c49cc4 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Mon, 19 Aug 2019 18:09:38 +0100 Subject: [PATCH 60/65] deleted unused files --- .../d04_modelling/transfer_learning/args.py | 88 --------------- .../transfer_learning/example.py | 100 ------------------ .../d04_modelling/transfer_learning/test.py | 6 -- 3 files changed, 194 deletions(-) delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/args.py delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/example.py delete mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/test.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/args.py b/src/traffic_analysis/d04_modelling/transfer_learning/args.py deleted file mode 100644 index f7d9cd9..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/args.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 -# This file contains the parameter used in train_tensorflow_model.py - -from __future__ import division, print_function - -from utils.misc_utils import parse_anchors, read_class_names -import math - -### Some paths -train_file = './data/my_data/train.txt' # The path of the training txt file. -val_file = './data/my_data/val.txt' # The path of the validation txt file. -restore_path = './data/darknet_weights/yolov3.ckpt' # The path of the weights to restore. -save_dir = './checkpoint/' # The directory of the weights to save. -log_dir = './data/logs/' # The directory to store the tensorboard log files. -progress_log_path = './data/progress.log' # The path to record the training progress. 
-anchor_path = './data/yolo_anchors.txt' # The path of the anchor txt file. -class_name_path = './data/coco.names' # The path of the class names. - -### Training releated numbers -batch_size = 6 -img_size = [416, 416] # Images will be resized to `img_size` and fed to the network, size format: [width, height] -letterbox_resize = True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. -total_epoches = 100 -train_evaluation_step = 100 # Evaluate on the training batch after some steps. -val_evaluation_epoch = 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. -save_epoch = 10 # Save the model after some epochs. -batch_norm_decay = 0.99 # decay in bn ops -weight_decay = 5e-4 # l2 weight decay -global_step = 0 # used when resuming training - -### tf.data parameters -num_threads = 10 # Number of threads for image processing used in tf.data pipeline. -prefetech_buffer = 5 # Prefetech_buffer used in tf.data pipeline. - -### Learning rate and optimizer -optimizer_name = 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] -save_optimizer = True # Whether to save the optimizer parameters into the checkpoint file. -learning_rate_init = 1e-4 -lr_type = 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] -lr_decay_epoch = 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. -lr_decay_factor = 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. -lr_lower_bound = 1e-6 # The minimum learning rate. -# only used in piecewise lr type -pw_boundaries = [30, 50] # epoch based boundaries -pw_values = [learning_rate_init, 3e-5, 1e-5] - -### Load and finetune -# Choose the parts you want to restore the weights. List form. 
-# restore_include: None, restore_exclude: None => restore the whole model -# restore_include: None, restore_exclude: scope => restore the whole model except `scope` -# restore_include: scope1, restore_exclude: scope2 => if scope1 contains scope2, restore scope1 and not restore scope2 (scope1 - scope2) -# choise 1: only restore the darknet body -# restore_include = ['yolov3/darknet53_body'] -# restore_exclude = None -# choise 2: restore all layers except the last 3 conv2d layers in 3 scale -restore_include = None -restore_exclude = ['yolov3/yolov3_head/Conv_14', 'yolov3/yolov3_head/Conv_6', 'yolov3/yolov3_head/Conv_22'] -# Choose the parts you want to finetune. List form. -# Set to None to train the whole model. -update_part = ['yolov3/yolov3_head'] - -### other training strategies -multi_scale_train = True # Whether to apply multi-scale training strategy. Image size varies from [320, 320] to [640, 640] by default. -use_label_smooth = True # Whether to use class label smoothing strategy. -use_focal_loss = True # Whether to apply focal loss on the conf loss. -use_mix_up = True # Whether to use mix up data augmentation strategy. -use_warm_up = True # whether to use warm up strategy to prevent from gradient exploding. -warm_up_epoch = 3 # Warm up training epoches. Set to a larger value if gradient explodes. - -### some constants in validation -# nms -nms_threshold = 0.45 # iou threshold in nms operation -score_threshold = 0.01 # threshold of the probability of the classes in nms operation, i.e. score = pred_confs * pred_probs. set lower for higher recall. -nms_topk = 150 # keep at most nms_topk outputs after nms -# mAP eval -eval_threshold = 0.5 # the iou threshold applied in mAP evaluation -use_voc_07_metric = False # whether to use voc 2007 evaluation metric, i.e. 
the 11-point metric - -### parse some params -anchors = parse_anchors(anchor_path) -classes = read_class_names(class_name_path) -class_num = len(classes) -train_img_cnt = len(open(train_file, 'r').readlines()) -val_img_cnt = len(open(val_file, 'r').readlines()) -train_batch_num = int(math.ceil(float(train_img_cnt) / batch_size)) - -lr_decay_freq = int(train_batch_num * lr_decay_epoch) -pw_boundaries = [float(i) * train_batch_num + global_step for i in pw_boundaries] \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/example.py b/src/traffic_analysis/d04_modelling/transfer_learning/example.py deleted file mode 100644 index 5f075ec..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/example.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding: utf-8 - -import xml.etree.ElementTree as ET -import os - -names_dict = {} -cnt = 0 -f = open('./voc_names.txt', 'r').readlines() -for line in f: - line = line.strip() - names_dict[line] = cnt - cnt += 1 - -voc_07 = '/data/VOCdevkit/VOC2007' -voc_12 = '/data/VOCdevkit/VOC2012' - -anno_path = [os.path.join(voc_07, 'Annotations'), os.path.join(voc_12, 'Annotations')] -img_path = [os.path.join(voc_07, 'JPEGImages'), os.path.join(voc_12, 'JPEGImages')] - -trainval_path = [os.path.join(voc_07, 'ImageSets/Main/trainval.txt'), - os.path.join(voc_12, 'ImageSets/Main/trainval.txt')] -test_path = [os.path.join(voc_07, 'ImageSets/Main/test.txt')] - - -def parse_xml(path): - tree = ET.parse(path) - img_name = path.split('/')[-1][:-4] - - height = tree.findtext("./size/height") - width = tree.findtext("./size/width") - - objects = [img_name, width, height] - - for obj in tree.findall('object'): - difficult = obj.find('difficult').text - if difficult == '1': - continue - name = obj.find('name').text - bbox = obj.find('bndbox') - xmin = bbox.find('xmin').text - ymin = bbox.find('ymin').text - xmax = bbox.find('xmax').text - ymax = bbox.find('ymax').text - - name = str(names_dict[name]) - 
objects.extend([name, xmin, ymin, xmax, ymax]) - if len(objects) > 1: - return objects - else: - return None - - -test_cnt = 0 - - -def gen_test_txt(txt_path): - global test_cnt - f = open(txt_path, 'w') - - for i, path in enumerate(test_path): - img_names = open(path, 'r').readlines() - for img_name in img_names: - img_name = img_name.strip() - xml_path = anno_path[i] + '/' + img_name + '.xml' - objects = parse_xml(xml_path) - if objects: - objects[0] = img_path[i] + '/' + img_name + '.jpg' - if os.path.exists(objects[0]): - objects.insert(0, str(test_cnt)) - test_cnt += 1 - objects = ' '.join(objects) + '\n' - f.write(objects) - f.close() - - -train_cnt = 0 - - -def gen_train_txt(txt_path): - global train_cnt - f = open(txt_path, 'w') - - for i, path in enumerate(trainval_path): - img_names = open(path, 'r').readlines() - for img_name in img_names: - img_name = img_name.strip() - xml_path = anno_path[i] + '/' + img_name + '.xml' - objects = parse_xml(xml_path) - if objects: - objects[0] = img_path[i] + '/' + img_name + '.jpg' - if os.path.exists(objects[0]): - objects.insert(0, str(train_cnt)) - train_cnt += 1 - objects = ' '.join(objects) + '\n' - f.write(objects) - f.close() - - -gen_train_txt('train.txt') -gen_test_txt('val.txt') \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/test.py b/src/traffic_analysis/d04_modelling/transfer_learning/test.py deleted file mode 100644 index 5d81ac6..0000000 --- a/src/traffic_analysis/d04_modelling/transfer_learning/test.py +++ /dev/null @@ -1,6 +0,0 @@ -from traffic_analysis.d00_utils.get_project_directory import get_project_directory - -print(get_project_directory()) -print(str(1).zfill(5)) - -b = {'a': 5, 'd': 6} \ No newline at end of file From 3740c10caa84d438f72bc79c569e8789ad866236 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Tue, 20 Aug 2019 17:57:21 +0100 Subject: [PATCH 61/65] patching to address comments from recent pr --- conf/base/paths.yml | 1 - 
conf/base/training_parameters.yml | 10 +-- src/run_transfer_learning.py | 10 ++- .../tensorflow_training_utils.py | 38 +++++------ .../train_tensorflow_model.py | 68 ++++++++++++------- .../transfer_learning/training_data_loader.py | 2 +- 6 files changed, 74 insertions(+), 55 deletions(-) diff --git a/conf/base/paths.yml b/conf/base/paths.yml index 3a90244..7d44b6b 100644 --- a/conf/base/paths.yml +++ b/conf/base/paths.yml @@ -28,7 +28,6 @@ local_paths: plots: "plots/" annotations: "annotations/" local_detection_model: "data/ref/detection_model/" - training: "data/ref/training/" db_paths: db_host: "dssg-london.ck0oseycrr7s.eu-west-2.rds.amazonaws.com" diff --git a/conf/base/training_parameters.yml b/conf/base/training_parameters.yml index 7b4c2b1..89b02ac 100644 --- a/conf/base/training_parameters.yml +++ b/conf/base/training_parameters.yml @@ -1,9 +1,9 @@ training: num_batches : 10 - letterbox_resize : True # Whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. + letterbox_resize : True # Whether to use letterbox resize, i.e., keep the original aspect ratio in the resized img. total_epochs : 1000 train_evaluation_step : 100 # Evaluate on the training batch after some steps. - val_evaluation_epoch : 2 # Evaluate on the whole validation dataset after some epochs. Set to None to evaluate every epoch. + val_evaluation_epoch : 2 # Evaluate on the validation dataset after some epochs. Set to None to evaluate all epoch. save_epoch : 10 # Save the model after some epochs. batch_norm_decay : 0.99 # decay in bn ops weight_decay : 0.0005 # l2 weight decay @@ -17,8 +17,8 @@ learning: optimizer_name : 'momentum' # Chosen from [sgd, momentum, adam, rmsprop] save_optimizer : True # Whether to save the optimizer parameters into the checkpoint file. 
learning_rate_init : 0.0001 - lr_type : 'piecewise' # Chosen from [fixed, exponential, cosine_decay, cosine_decay_restart, piecewise] - lr_decay_epoch : 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` and `cosine_decay_restart` lr_type. + lr_type : 'piecewise' # Chosen from [exponential, piecewise] + lr_decay_epoch : 5 # Epochs after which learning rate decays. Int or float. Used when chosen `exponential` lr_type. lr_decay_factor : 0.96 # The learning rate decay factor. Used when chosen `exponential` lr_type. lr_lower_bound : 0.000001 # The minimum learning rate. pw_boundaries : [30, 50] # epoch based boundaries @@ -27,7 +27,7 @@ learning: validation: # nms nms_threshold : 0.45 # iou threshold in nms operation - score_threshold : 0.01 # threshold of the probability of the classes in nms operation, i.e. score = pred_confs * pred_probs. set lower for higher recall. + score_threshold : 0.01 # threshold of the prob of the classes in nms operation, i.e. score = pred_confs * pred_probs. 
nms_topk : 150 # keep at most nms_topk outputs after nms # mAP eval diff --git a/src/run_transfer_learning.py b/src/run_transfer_learning.py index d25e410..fd9f8af 100644 --- a/src/run_transfer_learning.py +++ b/src/run_transfer_learning.py @@ -1,6 +1,6 @@ from traffic_analysis.d00_utils.load_confs import load_paths, load_credentials, \ load_parameters, load_training_parameters -from traffic_analysis.d04_modelling.transfer_learning.training_data_loader import DataLoader, TransferDataset +from traffic_analysis.d04_modelling.transfer_learning.training_data_loader import TrainingDataLoader, TransferDataset from traffic_analysis.d04_modelling.transfer_learning.train_tensorflow_model import transfer_learn paths = load_paths() @@ -8,8 +8,12 @@ params = load_parameters() train_params = load_training_parameters() -dl = DataLoader(datasets=[TransferDataset.cvat, TransferDataset.detrac], creds=creds, paths=paths) -x_train, y_train, x_test, y_test = dl.get_train_and_test(.8) +training_data_loader = TrainingDataLoader(datasets=[TransferDataset.cvat, TransferDataset.detrac], + creds=creds, + paths=paths) + +fraction_for_training = 0.8 +x_train, y_train, x_test, y_test = training_data_loader.get_train_and_test(fraction_for_training) saved_text_files_dir = paths['temp_annotation'] diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index e093912..8c651de 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -14,7 +14,8 @@ iter_cnt = 0 -def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=False, mix_up=False, letterbox_resize=True, interval=10): +def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=False, + mix_up=False, letterbox_resize=True, interval=10): ''' generate a 
batch of imgs and labels param: @@ -23,9 +24,10 @@ def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=F img_size: the image size to be resized to. format: [width, height]. anchors: anchors. shape: [9, 2]. mode: 'train' or 'val'. if set to 'train', data augmentation will be applied. - multi_scale: whether to use multi_scale training, img_size varies from [320, 320] to [640, 640] by default. Note that it will take effect only when mode is set to 'train'. - letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. - interval: change the scale of image every interval batches. Note that it's indeterministic because of the multi threading. + multi_scale: whether to use multi_scale training, img_size varies from [320, 320] to [640, 640] by default. + Note that it will take effect only when mode is set to 'train'. + letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized img. + interval: change the scale of image every interval batches. 
''' global iter_cnt # multi_scale training @@ -49,7 +51,8 @@ def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=F batch_line = mix_lines for line in batch_line: - img_idx, img, y_true_13, y_true_26, y_true_52 = parse_data(line, class_num, img_size, anchors, mode, letterbox_resize) + img_idx, img, y_true_13, y_true_26, y_true_52 = parse_data(line, class_num, img_size, anchors, mode, + letterbox_resize) img_idx_batch.append(img_idx) img_batch.append(img) @@ -57,7 +60,11 @@ def get_batch_data(batch_line, class_num, img_size, anchors, mode, multi_scale=F y_true_26_batch.append(y_true_26) y_true_52_batch.append(y_true_52) - img_idx_batch, img_batch, y_true_13_batch, y_true_26_batch, y_true_52_batch = np.asarray(img_idx_batch, np.int64), np.asarray(img_batch), np.asarray(y_true_13_batch), np.asarray(y_true_26_batch), np.asarray(y_true_52_batch) + img_idx_batch, img_batch, y_true_13_batch, y_true_26_batch, y_true_52_batch = np.asarray(img_idx_batch, np.int64), \ + np.asarray(img_batch), \ + np.asarray(y_true_13_batch), \ + np.asarray(y_true_26_batch), \ + np.asarray(y_true_52_batch) return img_idx_batch, img_batch, y_true_13_batch, y_true_26_batch, y_true_52_batch @@ -70,7 +77,7 @@ def parse_data(line, class_num, img_size, anchors, mode, letterbox_resize): img_size: the size of image to be resized to. [width, height] format. anchors: anchors. mode: 'train' or 'val'. When set to 'train', data_augmentation will be applied. - letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized image. + letterbox_resize: whether to use the letterbox resize, i.e., keep the original aspect ratio in the resized img. ''' if not isinstance(line, list): img_idx, pic_path, boxes, labels, _, _ = parse_line(line) @@ -206,13 +213,13 @@ def parse_line(line): if 'str' not in str(type(line)): line = line.decode() s = line.strip().split(' ') - assert len(s) > 8, 'Annotation error! Please check your annotation file. 
Make sure there is at least one target object in each image.' + assert len(s) > 8, 'Annotation error! Please check your file. Make sure there is an object in each image.' line_idx = int(s[0]) pic_path = s[1] img_width = int(s[2]) img_height = int(s[3]) s = s[4:] - assert len(s) % 5 == 0, 'Annotation error! Please check your annotation file. Maybe partially missing some coordinates?' + assert len(s) % 5 == 0, 'Annotation error! Please check your file. Maybe partially missing some coordinates?' box_cnt = len(s) // 5 boxes = [] labels = [] @@ -609,16 +616,6 @@ def config_learning_rate(lr_decay_freq, train_batch_num, global_step): train_params['lr_decay_factor'], staircase=True, name='exponential_learning_rate') return tf.maximum(lr_tmp, train_params['lr_lower_bound']) - # elif train_params['lr_type'] == 'cosine_decay': - # train_steps = (train_params['total_epoches'] - float(args.use_warm_up) * args.warm_up_epoch) * args.train_batch_num - # return args.lr_lower_bound + 0.5 * (args.learning_rate_init - args.lr_lower_bound) * \ - # (1 + tf.cos(global_step / train_steps * np.pi)) - # elif args.lr_type == 'cosine_decay_restart': - # return tf.train.cosine_decay_restarts(args.learning_rate_init, global_step, - # args.lr_decay_freq, t_mul=2.0, m_mul=1.0, - # name='cosine_decay_learning_rate_restart') - # elif args.lr_type == 'fixed': - # return tf.convert_to_tensor(args.learning_rate_init, name='fixed_learning_rate') elif train_params['lr_type'] == 'piecewise': train_params['pw_boundaries'] = [float(i) * train_batch_num + train_params['global_step'] for i in train_params['pw_boundaries']] @@ -821,7 +818,8 @@ def evaluate_on_cpu(y_pred, y_true, num_classes, calc_now=True, max_boxes=50, sc # pred_labels: [N] # N: Detected box number of the current image pred_boxes, pred_confs, pred_labels = cpu_nms(pred_boxes, pred_confs * pred_probs, num_classes, - max_boxes=max_boxes, score_thresh=score_thresh, iou_thresh=iou_thresh) + max_boxes=max_boxes, score_thresh=score_thresh, + 
iou_thresh=iou_thresh) # len: N pred_labels_list = [] if pred_labels is None else pred_labels.tolist() diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index bbdc3b3..fc83981 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -7,10 +7,9 @@ import numpy as np import logging from tqdm import trange -import random from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ - shuffle_and_overwrite, make_summary, config_learning_rate, config_optimizer, AverageMeter, \ + make_summary, config_learning_rate, config_optimizer, AverageMeter, \ evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors @@ -143,7 +142,7 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ train_op = optimizer.apply_gradients(clip_grad_var, global_step=global_step) if train_params['save_optimizer']: - print('Saving optimizer parameters to checkpoint! Remember to restore the global_step in the fine-tuning afterwards.') + print('Saving optimizer parameters to checkpoint! 
Remember to restore global_step in fine-tuning afterwards.') saver_to_save = tf.train.Saver() saver_best = tf.train.Saver() @@ -179,19 +178,23 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ loss_class.update(__loss[4], len(__y_pred[0])) if __global_step % train_params['train_evaluation_step'] == 0 and __global_step > 0: - # recall, precision = evaluate_on_cpu(__y_pred, __y_true, args.number_classes, args.nms_topk, args.score_threshold, args.nms_threshold) recall, precision = evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, - __y_pred, __y_true, number_classes, train_params['nms_threshold']) + __y_pred, __y_true, number_classes, + train_params['nms_threshold']) info = "Epoch: {}, global_step: {} | loss: total: {:.2f}, xy: {:.2f}, " \ - "wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format( - epoch, int(__global_step), loss_total.average, loss_xy.average, loss_wh.average, loss_conf.average, loss_class.average) + "wh: {:.2f}, conf: {:.2f}, class: {:.2f} | ".format(epoch, int(__global_step), + loss_total.average, loss_xy.average, + loss_wh.average, loss_conf.average, + loss_class.average) info += 'Last batch: rec: {:.3f}, prec: {:.3f} | lr: {:.5g}'.format(recall, precision, __lr) print(info) logging.info(info) - writer.add_summary(make_summary('evaluation/train_batch_recall', recall), global_step=__global_step) - writer.add_summary(make_summary('evaluation/train_batch_precision', precision), global_step=__global_step) + writer.add_summary(make_summary('evaluation/train_batch_recall', recall), + global_step=__global_step) + writer.add_summary(make_summary('evaluation/train_batch_precision', precision), + global_step=__global_step) if np.isnan(loss_total.average): print('****' * 10) @@ -201,8 +204,10 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ # NOTE: this is just demo. You can set the conditions when to save the weights. 
if epoch % train_params['save_epoch'] == 0 and epoch > 0: if loss_total.average <= 2.: - saver_to_save.save(sess, os.path.join(train_params['trained_model_name'], - 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format(epoch, int(__global_step), loss_total.average, __lr))) + saver_to_save.save(sess, + os.path.join(train_params['trained_model_name'], + 'model-epoch_{}_step_{}_loss_{:.4f}_lr_{:.5g}'.format( + epoch, int(__global_step), loss_total.average, __lr))) # switch to validation dataset for evaluation if epoch % train_params['val_evaluation_epoch'] == 0 and epoch >= train_params['warm_up_epoch']: @@ -216,7 +221,8 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ for j in trange(val_img_cnt): __image_ids, __y_pred, __loss = sess.run([image_ids, y_pred, loss], feed_dict={is_training: False}) - pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, __image_ids, __y_pred) + pred_content = get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, + pred_scores_flag, __image_ids, __y_pred) val_preds.extend(pred_content) val_loss_total.update(__loss[0]) val_loss_xy.update(__loss[1]) @@ -246,24 +252,36 @@ def transfer_learn(paths, params, train_params, train_file, test_file, selected_ ap_total.update(ap, 1) mAP = ap_total.average - info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format(rec_total.average, prec_total.average, mAP) + info += 'EVAL: Recall: {:.4f}, Precison: {:.4f}, mAP: {:.4f}\n'.format( + rec_total.average, prec_total.average, mAP) info += 'EVAL: loss: total: {:.2f}, xy: {:.2f}, wh: {:.2f}, conf: {:.2f}, class: {:.2f}\n'.format( - val_loss_total.average, val_loss_xy.average, val_loss_wh.average, val_loss_conf.average, val_loss_class.average) + val_loss_total.average, val_loss_xy.average, val_loss_wh.average, + val_loss_conf.average, val_loss_class.average) print(info) logging.info(info) if mAP > best_mAP: best_mAP = mAP - saver_best.save(sess, os.path.join(transfer_learn_model_dir, - 
'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format(epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) - - writer.add_summary(make_summary('evaluation/val_mAP', mAP), global_step=epoch) - writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), global_step=epoch) - writer.add_summary(make_summary('evaluation/val_precision', prec_total.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/total_loss', val_loss_total.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_xy', val_loss_xy.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), global_step=epoch) - writer.add_summary(make_summary('validation_statistics/loss_class', val_loss_class.average), global_step=epoch) + saver_best.save(sess, os.path.join( + transfer_learn_model_dir, + 'best_model_Epoch_{}_step_{}_mAP_{:.4f}_loss_{:.4f}_lr_{:.7g}'.format( + epoch, int(__global_step), best_mAP, val_loss_total.average, __lr))) + + writer.add_summary(make_summary('evaluation/val_mAP', mAP), + global_step=epoch) + writer.add_summary(make_summary('evaluation/val_recall', rec_total.average), + global_step=epoch) + writer.add_summary(make_summary('evaluation/val_precision', prec_total.average), + global_step=epoch) + writer.add_summary(make_summary('validation_statistics/total_loss', val_loss_total.average), + global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_xy', val_loss_xy.average), + global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_wh', val_loss_wh.average), + global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_conf', val_loss_conf.average), + global_step=epoch) + writer.add_summary(make_summary('validation_statistics/loss_class', 
val_loss_class.average), + global_step=epoch) return diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py index 7ab4dbb..29a5498 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/training_data_loader.py @@ -19,7 +19,7 @@ class TransferDataset(Enum): cvat = 2 -class DataLoader(object): +class TrainingDataLoader(object): def __init__(self, datasets, creds, paths): self.datasets = datasets From 61f855dde634db65703584a3509985acf1e087d7 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Wed, 21 Aug 2019 15:26:36 +0100 Subject: [PATCH 62/65] broke down tensorflow_training_utils into specific scripts --- .../tensorflow_evaluation_utils.py | 0 .../tensorflow_image_formatting_utils.py | 0 .../tensorflow_processing_utils.py | 0 .../tensorflow_training_utils.py | 910 +----------------- 4 files changed, 3 insertions(+), 907 deletions(-) create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py create mode 100644 src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py 
b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py index 8c651de..47e094b 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_training_utils.py @@ -6,9 +6,10 @@ import random import tensorflow as tf from tensorflow.core.framework import summary_pb2 -from collections import Counter from traffic_analysis.d00_utils.load_confs import load_training_parameters +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_image_formatting_utils import ( + mix_up, resize_with_bbox, random_flip, random_color_distort, random_expand, random_crop_with_constraints) PY_VERSION = sys.version_info[0] iter_cnt = 0 @@ -233,378 +234,6 @@ def parse_line(line): return line_idx, pic_path, boxes, labels, img_width, img_height -def mix_up(img1, img2, bbox1, bbox2): - ''' - return: - mix_img: HWC format mix up image - mix_bbox: [N, 5] shape mix up bbox, i.e. `x_min, y_min, x_max, y_mix, mixup_weight`. - ''' - height = max(img1.shape[0], img2.shape[0]) - width = max(img1.shape[1], img2.shape[1]) - - mix_img = np.zeros(shape=(height, width, 3), dtype='float32') - - # rand_num = np.random.random() - rand_num = np.random.beta(1.5, 1.5) - rand_num = max(0, min(1, rand_num)) - mix_img[:img1.shape[0], :img1.shape[1], :] = img1.astype('float32') * rand_num - mix_img[:img2.shape[0], :img2.shape[1], :] += img2.astype('float32') * (1. - rand_num) - - mix_img = mix_img.astype('uint8') - - # the last element of the 2nd dimention is the mix up weight - bbox1 = np.concatenate((bbox1, np.full(shape=(bbox1.shape[0], 1), fill_value=rand_num)), axis=-1) - bbox2 = np.concatenate((bbox2, np.full(shape=(bbox2.shape[0], 1), fill_value=1. 
- rand_num)), axis=-1) - mix_bbox = np.concatenate((bbox1, bbox2), axis=0) - - return mix_img, mix_bbox - - -def bbox_crop(bbox, crop_box=None, allow_outside_center=True): - """Crop bounding boxes according to slice area. - This method is mainly used with image cropping to ensure bonding boxes fit - within the cropped image. - Parameters - ---------- - bbox : numpy.ndarray - Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes. - The second axis represents attributes of the bounding box. - Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`, - we allow additional attributes other than coordinates, which stay intact - during bounding box transformations. - crop_box : tuple - Tuple of length 4. :math:`(x_{min}, y_{min}, width, height)` - allow_outside_center : bool - If `False`, remove bounding boxes which have centers outside cropping area. - Returns - ------- - numpy.ndarray - Cropped bounding boxes with shape (M, 4+) where M <= N. - """ - bbox = bbox.copy() - if crop_box is None: - return bbox - if not len(crop_box) == 4: - raise ValueError( - "Invalid crop_box parameter, requires length 4, given {}".format(str(crop_box))) - if sum([int(c is None) for c in crop_box]) == 4: - return bbox - - l, t, w, h = crop_box - - left = l if l else 0 - top = t if t else 0 - right = left + (w if w else np.inf) - bottom = top + (h if h else np.inf) - crop_bbox = np.array((left, top, right, bottom)) - - if allow_outside_center: - mask = np.ones(bbox.shape[0], dtype=bool) - else: - centers = (bbox[:, :2] + bbox[:, 2:4]) / 2 - mask = np.logical_and(crop_bbox[:2] <= centers, centers < crop_bbox[2:]).all(axis=1) - - # transform borders - bbox[:, :2] = np.maximum(bbox[:, :2], crop_bbox[:2]) - bbox[:, 2:4] = np.minimum(bbox[:, 2:4], crop_bbox[2:4]) - bbox[:, :2] -= crop_bbox[:2] - bbox[:, 2:4] -= crop_bbox[:2] - - mask = np.logical_and(mask, (bbox[:, :2] < bbox[:, 2:4]).all(axis=1)) - bbox = bbox[mask] - return bbox - - -def bbox_iou(bbox_a, bbox_b, 
offset=0): - """Calculate Intersection-Over-Union(IOU) of two bounding boxes. - Parameters - ---------- - bbox_a : numpy.ndarray - An ndarray with shape :math:`(N, 4)`. - bbox_b : numpy.ndarray - An ndarray with shape :math:`(M, 4)`. - offset : float or int, default is 0 - The ``offset`` is used to control the whether the width(or height) is computed as - (right - left + ``offset``). - Note that the offset must be 0 for normalized bboxes, whose ranges are in ``[0, 1]``. - Returns - ------- - numpy.ndarray - An ndarray with shape :math:`(N, M)` indicates IOU between each pairs of - bounding boxes in `bbox_a` and `bbox_b`. - """ - if bbox_a.shape[1] < 4 or bbox_b.shape[1] < 4: - raise IndexError("Bounding boxes axis 1 must have at least length 4") - - tl = np.maximum(bbox_a[:, None, :2], bbox_b[:, :2]) - br = np.minimum(bbox_a[:, None, 2:4], bbox_b[:, 2:4]) - - area_i = np.prod(br - tl + offset, axis=2) * (tl < br).all(axis=2) - area_a = np.prod(bbox_a[:, 2:4] - bbox_a[:, :2] + offset, axis=1) - area_b = np.prod(bbox_b[:, 2:4] - bbox_b[:, :2] + offset, axis=1) - return area_i / (area_a[:, None] + area_b - area_i) - - -def random_crop_with_constraints(bbox, size, min_scale=0.3, max_scale=1, - max_aspect_ratio=2, constraints=None, - max_trial=50): - """Crop an image randomly with bounding box constraints. - This data augmentation is used in training of - Single Shot Multibox Detector [#]_. More details can be found in - data augmentation section of the original paper. - .. [#] Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, - Scott Reed, Cheng-Yang Fu, Alexander C. Berg. - SSD: Single Shot MultiBox Detector. ECCV 2016. - Parameters - ---------- - bbox : numpy.ndarray - Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes. - The second axis represents attributes of the bounding box. 
- Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`, - we allow additional attributes other than coordinates, which stay intact - during bounding box transformations. - size : tuple - Tuple of length 2 of image shape as (width, height). - min_scale : float - The minimum ratio between a cropped region and the original image. - The default value is :obj:`0.3`. - max_scale : float - The maximum ratio between a cropped region and the original image. - The default value is :obj:`1`. - max_aspect_ratio : float - The maximum aspect ratio of cropped region. - The default value is :obj:`2`. - constraints : iterable of tuples - An iterable of constraints. - Each constraint should be :obj:`(min_iou, max_iou)` format. - If means no constraint if set :obj:`min_iou` or :obj:`max_iou` to :obj:`None`. - If this argument defaults to :obj:`None`, :obj:`((0.1, None), (0.3, None), - (0.5, None), (0.7, None), (0.9, None), (None, 1))` will be used. - max_trial : int - Maximum number of trials for each constraint before exit no matter what. - Returns - ------- - numpy.ndarray - Cropped bounding boxes with shape :obj:`(M, 4+)` where M <= N. - tuple - Tuple of length 4 as (x_offset, y_offset, new_width, new_height). 
- """ - # default params in paper - if constraints is None: - constraints = ( - (0.1, None), - (0.3, None), - (0.5, None), - (0.7, None), - (0.9, None), - (None, 1), - ) - - w, h = size - - candidates = [(0, 0, w, h)] - for min_iou, max_iou in constraints: - min_iou = -np.inf if min_iou is None else min_iou - max_iou = np.inf if max_iou is None else max_iou - - for _ in range(max_trial): - scale = random.uniform(min_scale, max_scale) - aspect_ratio = random.uniform( - max(1 / max_aspect_ratio, scale * scale), - min(max_aspect_ratio, 1 / (scale * scale))) - crop_h = int(h * scale / np.sqrt(aspect_ratio)) - crop_w = int(w * scale * np.sqrt(aspect_ratio)) - - crop_t = random.randrange(h - crop_h) - crop_l = random.randrange(w - crop_w) - crop_bb = np.array((crop_l, crop_t, crop_l + crop_w, crop_t + crop_h)) - - if len(bbox) == 0: - top, bottom = crop_t, crop_t + crop_h - left, right = crop_l, crop_l + crop_w - return bbox, (left, top, right-left, bottom-top) - - iou = bbox_iou(bbox, crop_bb[np.newaxis]) - if min_iou <= iou.min() and iou.max() <= max_iou: - top, bottom = crop_t, crop_t + crop_h - left, right = crop_l, crop_l + crop_w - candidates.append((left, top, right-left, bottom-top)) - break - - # random select one - while candidates: - crop = candidates.pop(np.random.randint(0, len(candidates))) - new_bbox = bbox_crop(bbox, crop, allow_outside_center=False) - if new_bbox.size < 1: - continue - new_crop = (crop[0], crop[1], crop[2], crop[3]) - return new_bbox, new_crop - return bbox, (0, 0, w, h) - - -def random_color_distort(img, brightness_delta=32, hue_vari=18, sat_vari=0.5, val_vari=0.5): - ''' - randomly distort image color. Adjust brightness, hue, saturation, value. - param: - img: a BGR uint8 format OpenCV image. HWC format. 
- ''' - - def random_hue(img_hsv, hue_vari, p=0.5): - if np.random.uniform(0, 1) > p: - hue_delta = np.random.randint(-hue_vari, hue_vari) - img_hsv[:, :, 0] = (img_hsv[:, :, 0] + hue_delta) % 180 - return img_hsv - - def random_saturation(img_hsv, sat_vari, p=0.5): - if np.random.uniform(0, 1) > p: - sat_mult = 1 + np.random.uniform(-sat_vari, sat_vari) - img_hsv[:, :, 1] *= sat_mult - return img_hsv - - def random_value(img_hsv, val_vari, p=0.5): - if np.random.uniform(0, 1) > p: - val_mult = 1 + np.random.uniform(-val_vari, val_vari) - img_hsv[:, :, 2] *= val_mult - return img_hsv - - def random_brightness(img, brightness_delta, p=0.5): - if np.random.uniform(0, 1) > p: - img = img.astype(np.float32) - brightness_delta = int(np.random.uniform(-brightness_delta, brightness_delta)) - img = img + brightness_delta - return np.clip(img, 0, 255) - - # brightness - img = random_brightness(img, brightness_delta) - img = img.astype(np.uint8) - - # color jitter - img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).astype(np.float32) - - if np.random.randint(0, 2): - img_hsv = random_value(img_hsv, val_vari) - img_hsv = random_saturation(img_hsv, sat_vari) - img_hsv = random_hue(img_hsv, hue_vari) - else: - img_hsv = random_saturation(img_hsv, sat_vari) - img_hsv = random_hue(img_hsv, hue_vari) - img_hsv = random_value(img_hsv, val_vari) - - img_hsv = np.clip(img_hsv, 0, 255) - img = cv2.cvtColor(img_hsv.astype(np.uint8), cv2.COLOR_HSV2BGR) - - return img - - -def letterbox_resize(img, new_width, new_height, interp=0): - ''' - Letterbox resize. keep the original aspect ratio in the resized image. 
- ''' - ori_height, ori_width = img.shape[:2] - - resize_ratio = min(new_width / ori_width, new_height / ori_height) - - resize_w = int(resize_ratio * ori_width) - resize_h = int(resize_ratio * ori_height) - - img = cv2.resize(img, (resize_w, resize_h), interpolation=interp) - image_padded = np.full((new_height, new_width, 3), 128, np.uint8) - - dw = int((new_width - resize_w) / 2) - dh = int((new_height - resize_h) / 2) - - image_padded[dh: resize_h + dh, dw: resize_w + dw, :] = img - - return image_padded, resize_ratio, dw, dh - - -def resize_with_bbox(img, bbox, new_width, new_height, interp=0, letterbox=False): - ''' - Resize the image and correct the bbox accordingly. - ''' - - if letterbox: - image_padded, resize_ratio, dw, dh = letterbox_resize(img, new_width, new_height, interp) - - # xmin, xmax - bbox[:, [0, 2]] = bbox[:, [0, 2]] * resize_ratio + dw - # ymin, ymax - bbox[:, [1, 3]] = bbox[:, [1, 3]] * resize_ratio + dh - - return image_padded, bbox - else: - ori_height, ori_width = img.shape[:2] - - img = cv2.resize(img, (new_width, new_height), interpolation=interp) - - # xmin, xmax - bbox[:, [0, 2]] = bbox[:, [0, 2]] / ori_width * new_width - # ymin, ymax - bbox[:, [1, 3]] = bbox[:, [1, 3]] / ori_height * new_height - - return img, bbox - - -def random_flip(img, bbox, px=0, py=0): - ''' - Randomly flip the image and correct the bbox. 
- param: - px: - the probability of horizontal flip - py: - the probability of vertical flip - ''' - height, width = img.shape[:2] - if np.random.uniform(0, 1) < px: - img = cv2.flip(img, 1) - xmax = width - bbox[:, 0] - xmin = width - bbox[:, 2] - bbox[:, 0] = xmin - bbox[:, 2] = xmax - - if np.random.uniform(0, 1) < py: - img = cv2.flip(img, 0) - ymax = height - bbox[:, 1] - ymin = height - bbox[:, 3] - bbox[:, 1] = ymin - bbox[:, 3] = ymax - return img, bbox - - -def random_expand(img, bbox, max_ratio=4, fill=0, keep_ratio=True): - ''' - Random expand original image with borders, this is identical to placing - the original image on a larger canvas. - param: - max_ratio : - Maximum ratio of the output image on both direction(vertical and horizontal) - fill : - The value(s) for padded borders. - keep_ratio : bool - If `True`, will keep output image the same aspect ratio as input. - ''' - h, w, c = img.shape - ratio_x = random.uniform(1, max_ratio) - if keep_ratio: - ratio_y = ratio_x - else: - ratio_y = random.uniform(1, max_ratio) - - oh, ow = int(h * ratio_y), int(w * ratio_x) - off_y = random.randint(0, oh - h) - off_x = random.randint(0, ow - w) - - dst = np.full(shape=(oh, ow, c), fill_value=fill, dtype=img.dtype) - - dst[off_y:off_y + h, off_x:off_x + w, :] = img - - # correct bbox - bbox[:, :2] += (off_x, off_y) - bbox[:, 2:4] += (off_x, off_y) - - return dst, bbox - - def make_summary(name, val): return summary_pb2.Summary(value=[summary_pb2.Summary.Value(tag=name, simple_value=val)]) @@ -656,542 +285,9 @@ def update(self, val, n=1): self.average = self.sum / float(self.count) -def cpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, iou_thresh=0.5): - """ - Perform NMS on CPU. 
- Arguments: - boxes: shape [1, 10647, 4] - scores: shape [1, 10647, num_classes] - """ - - boxes = boxes.reshape(-1, 4) - scores = scores.reshape(-1, num_classes) - # Picked bounding boxes - picked_boxes, picked_score, picked_label = [], [], [] - - for i in range(num_classes): - indices = np.where(scores[:,i] >= score_thresh) - filter_boxes = boxes[indices] - filter_scores = scores[:,i][indices] - if len(filter_boxes) == 0: - continue - # do non_max_suppression on the cpu - indices = py_nms(filter_boxes, filter_scores, - max_boxes=max_boxes, iou_thresh=iou_thresh) - picked_boxes.append(filter_boxes[indices]) - picked_score.append(filter_scores[indices]) - picked_label.append(np.ones(len(indices), dtype='int32')*i) - if len(picked_boxes) == 0: - return None, None, None - - boxes = np.concatenate(picked_boxes, axis=0) - score = np.concatenate(picked_score, axis=0) - label = np.concatenate(picked_label, axis=0) - - return boxes, score, label - - - -def py_nms(boxes, scores, max_boxes=50, iou_thresh=0.5): - """ - Pure Python NMS baseline. 
- - Arguments: boxes: shape of [-1, 4], the value of '-1' means that dont know the - exact number of boxes - scores: shape of [-1,] - max_boxes: representing the maximum of boxes to be selected by non_max_suppression - iou_thresh: representing iou_threshold for deciding to keep boxes - """ - assert boxes.shape[1] == 4 and len(scores.shape) == 1 - - x1 = boxes[:, 0] - y1 = boxes[:, 1] - x2 = boxes[:, 2] - y2 = boxes[:, 3] - - areas = (x2 - x1) * (y2 - y1) - order = scores.argsort()[::-1] - - keep = [] - while order.size > 0: - i = order[0] - keep.append(i) - xx1 = np.maximum(x1[i], x1[order[1:]]) - yy1 = np.maximum(y1[i], y1[order[1:]]) - xx2 = np.minimum(x2[i], x2[order[1:]]) - yy2 = np.minimum(y2[i], y2[order[1:]]) - - w = np.maximum(0.0, xx2 - xx1 + 1) - h = np.maximum(0.0, yy2 - yy1 + 1) - inter = w * h - ovr = inter / (areas[i] + areas[order[1:]] - inter) - - inds = np.where(ovr <= iou_thresh)[0] - order = order[inds + 1] - - return keep[:max_boxes] - - -def calc_iou(pred_boxes, true_boxes): - ''' - Maintain an efficient way to calculate the ios matrix using the numpy broadcast tricks. - shape_info: pred_boxes: [N, 4] - true_boxes: [V, 4] - return: IoU matrix: shape: [N, V] - ''' - - # [N, 1, 4] - pred_boxes = np.expand_dims(pred_boxes, -2) - # [1, V, 4] - true_boxes = np.expand_dims(true_boxes, 0) - - # [N, 1, 2] & [1, V, 2] ==> [N, V, 2] - intersect_mins = np.maximum(pred_boxes[..., :2], true_boxes[..., :2]) - intersect_maxs = np.minimum(pred_boxes[..., 2:], true_boxes[..., 2:]) - intersect_wh = np.maximum(intersect_maxs - intersect_mins, 0.) 
- - # shape: [N, V] - intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1] - # shape: [N, 1, 2] - pred_box_wh = pred_boxes[..., 2:] - pred_boxes[..., :2] - # shape: [N, 1] - pred_box_area = pred_box_wh[..., 0] * pred_box_wh[..., 1] - # [1, V, 2] - true_boxes_wh = true_boxes[..., 2:] - true_boxes[..., :2] - # [1, V] - true_boxes_area = true_boxes_wh[..., 0] * true_boxes_wh[..., 1] - - # shape: [N, V] - iou = intersect_area / (pred_box_area + true_boxes_area - intersect_area + 1e-10) - - return iou - - -def evaluate_on_cpu(y_pred, y_true, num_classes, calc_now=True, max_boxes=50, score_thresh=0.5, iou_thresh=0.5): - ''' - Given y_pred and y_true of a batch of data, get the recall and precision of the current batch. - ''' - - num_images = y_true[0].shape[0] - true_labels_dict = {i: 0 for i in range(num_classes)} # {class: count} - pred_labels_dict = {i: 0 for i in range(num_classes)} - true_positive_dict = {i: 0 for i in range(num_classes)} - - for i in range(num_images): - true_labels_list, true_boxes_list = [], [] - for j in range(3): # three feature maps - # shape: [13, 13, 3, 80] - true_probs_temp = y_true[j][i][..., 5:-1] - # shape: [13, 13, 3, 4] (x_center, y_center, w, h) - true_boxes_temp = y_true[j][i][..., 0:4] - - # [13, 13, 3] - object_mask = true_probs_temp.sum(axis=-1) > 0 - - # [V, 3] V: Ground truth number of the current image - true_probs_temp = true_probs_temp[object_mask] - # [V, 4] - true_boxes_temp = true_boxes_temp[object_mask] - - # [V], labels - true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist() - # [V, 4] (x_center, y_center, w, h) - true_boxes_list += true_boxes_temp.tolist() - - if len(true_labels_list) != 0: - for cls, count in Counter(true_labels_list).items(): - true_labels_dict[cls] += count - - # [V, 4] (xmin, ymin, xmax, ymax) - true_boxes = np.array(true_boxes_list) - box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4] - true_boxes[:, 0:2] = box_centers - box_sizes / 2. 
- true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes - - # [1, xxx, 4] - pred_boxes = y_pred[0][i:i + 1] - pred_confs = y_pred[1][i:i + 1] - pred_probs = y_pred[2][i:i + 1] - - # pred_boxes: [N, 4] - # pred_confs: [N] - # pred_labels: [N] - # N: Detected box number of the current image - pred_boxes, pred_confs, pred_labels = cpu_nms(pred_boxes, pred_confs * pred_probs, num_classes, - max_boxes=max_boxes, score_thresh=score_thresh, - iou_thresh=iou_thresh) - - # len: N - pred_labels_list = [] if pred_labels is None else pred_labels.tolist() - if pred_labels_list == []: - continue - - # calc iou - # [N, V] - iou_matrix = calc_iou(pred_boxes, true_boxes) - # [N] - max_iou_idx = np.argmax(iou_matrix, axis=-1) - - correct_idx = [] - correct_conf = [] - for k in range(max_iou_idx.shape[0]): - pred_labels_dict[pred_labels_list[k]] += 1 - match_idx = max_iou_idx[k] # V level - if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]: - if match_idx not in correct_idx: - correct_idx.append(match_idx) - correct_conf.append(pred_confs[k]) - else: - same_idx = correct_idx.index(match_idx) - if pred_confs[k] > correct_conf[same_idx]: - correct_idx.pop(same_idx) - correct_conf.pop(same_idx) - correct_idx.append(match_idx) - correct_conf.append(pred_confs[k]) - - for t in correct_idx: - true_positive_dict[true_labels_list[t]] += 1 - - if calc_now: - # avoid divided by 0 - recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6) - precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6) - - return recall, precision - else: - return true_positive_dict, true_labels_dict, pred_labels_dict - - -def evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, - y_pred, y_true, num_classes, iou_thresh=0.5, calc_now=True): - ''' - Given y_pred and y_true of a batch of data, get the recall and precision of the current batch. - This function will perform gpu operation on the GPU. 
- ''' - - num_images = y_true[0].shape[0] - true_labels_dict = {i: 0 for i in range(num_classes)} # {class: count} - pred_labels_dict = {i: 0 for i in range(num_classes)} - true_positive_dict = {i: 0 for i in range(num_classes)} - - for i in range(num_images): - true_labels_list, true_boxes_list = [], [] - for j in range(3): # three feature maps - # shape: [13, 13, 3, 80] - true_probs_temp = y_true[j][i][..., 5:-1] - # shape: [13, 13, 3, 4] (x_center, y_center, w, h) - true_boxes_temp = y_true[j][i][..., 0:4] - - # [13, 13, 3] - object_mask = true_probs_temp.sum(axis=-1) > 0 - - # [V, 80] V: Ground truth number of the current image - true_probs_temp = true_probs_temp[object_mask] - # [V, 4] - true_boxes_temp = true_boxes_temp[object_mask] - - # [V], labels, each from 0 to 79 - true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist() - # [V, 4] (x_center, y_center, w, h) - true_boxes_list += true_boxes_temp.tolist() - - if len(true_labels_list) != 0: - for cls, count in Counter(true_labels_list).items(): - true_labels_dict[cls] += count - - # [V, 4] (xmin, ymin, xmax, ymax) - true_boxes = np.array(true_boxes_list) - box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4] - true_boxes[:, 0:2] = box_centers - box_sizes / 2. 
- true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes - - # [1, xxx, 4] - pred_boxes = y_pred[0][i:i + 1] - pred_confs = y_pred[1][i:i + 1] - pred_probs = y_pred[2][i:i + 1] - - # pred_boxes: [N, 4] - # pred_confs: [N] - # pred_labels: [N] - # N: Detected box number of the current image - pred_boxes, pred_confs, pred_labels = sess.run(gpu_nms_op, - feed_dict={pred_boxes_flag: pred_boxes, - pred_scores_flag: pred_confs * pred_probs}) - # len: N - pred_labels_list = [] if pred_labels is None else pred_labels.tolist() - if pred_labels_list == []: - continue - - # calc iou - # [N, V] - iou_matrix = calc_iou(pred_boxes, true_boxes) - # [N] - max_iou_idx = np.argmax(iou_matrix, axis=-1) - - correct_idx = [] - correct_conf = [] - for k in range(max_iou_idx.shape[0]): - pred_labels_dict[pred_labels_list[k]] += 1 - match_idx = max_iou_idx[k] # V level - if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]: - if match_idx not in correct_idx: - correct_idx.append(match_idx) - correct_conf.append(pred_confs[k]) - else: - same_idx = correct_idx.index(match_idx) - if pred_confs[k] > correct_conf[same_idx]: - correct_idx.pop(same_idx) - correct_conf.pop(same_idx) - correct_idx.append(match_idx) - correct_conf.append(pred_confs[k]) - - for t in correct_idx: - true_positive_dict[true_labels_list[t]] += 1 - - if calc_now: - # avoid divided by 0 - recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6) - precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6) - - return recall, precision - else: - return true_positive_dict, true_labels_dict, pred_labels_dict - - -def voc_eval(gt_dict, val_preds, classidx, iou_thres=0.5, use_07_metric=False): - ''' - Top level function that does the PASCAL VOC evaluation. 
- ''' - # 1.obtain gt: extract all gt objects for this class - class_recs = {} - npos = 0 - for img_id in gt_dict: - R = [obj for obj in gt_dict[img_id] if obj[-1] == classidx] - bbox = np.array([x[:4] for x in R]) - det = [False] * len(R) - npos += len(R) - class_recs[img_id] = {'bbox': bbox, 'det': det} - - # 2. obtain pred results - pred = [x for x in val_preds if x[-1] == classidx] - img_ids = [x[0] for x in pred] - confidence = np.array([x[-2] for x in pred]) - BB = np.array([[x[1], x[2], x[3], x[4]] for x in pred]) - - # 3. sort by confidence - sorted_ind = np.argsort(-confidence) - try: - BB = BB[sorted_ind, :] - except: - print('no box, ignore') - return 1e-6, 1e-6, 0, 0, 0 - img_ids = [img_ids[x] for x in sorted_ind] - - # 4. mark TPs and FPs - nd = len(img_ids) - tp = np.zeros(nd) - fp = np.zeros(nd) - - for d in range(nd): - # all the gt info in some image - R = class_recs[img_ids[d]] - bb = BB[d, :] - ovmax = -np.Inf - BBGT = R['bbox'] - - if BBGT.size > 0: - # calc iou - # intersection - ixmin = np.maximum(BBGT[:, 0], bb[0]) - iymin = np.maximum(BBGT[:, 1], bb[1]) - ixmax = np.minimum(BBGT[:, 2], bb[2]) - iymax = np.minimum(BBGT[:, 3], bb[3]) - iw = np.maximum(ixmax - ixmin + 1., 0.) - ih = np.maximum(iymax - iymin + 1., 0.) - inters = iw * ih - - # union - uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + (BBGT[:, 2] - BBGT[:, 0] + 1.) * ( - BBGT[:, 3] - BBGT[:, 1] + 1.) - inters) - - overlaps = inters / uni - ovmax = np.max(overlaps) - jmax = np.argmax(overlaps) - - if ovmax > iou_thres: - # gt not matched yet - if not R['det'][jmax]: - tp[d] = 1. - R['det'][jmax] = 1 - else: - fp[d] = 1. - else: - fp[d] = 1. 
- - # compute precision recall - fp = np.cumsum(fp) - tp = np.cumsum(tp) - rec = tp / float(npos) - # avoid divide by zero in case the first detection matches a difficult - # ground truth - prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) - ap = voc_ap(rec, prec, use_07_metric) - - # return rec, prec, ap - return npos, nd, tp[-1] / float(npos), tp[-1] / float(nd), ap - - - -gt_dict = {} # key: img_id, value: gt object list -def parse_gt_rec(gt_filename, target_img_size, letterbox_resize=True): - ''' - parse and re-organize the gt info. - return: - gt_dict: dict. Each key is a img_id, the value is the gt bboxes in the corresponding img. - ''' - - global gt_dict - - if not gt_dict: - new_width, new_height = target_img_size - with open(gt_filename, 'r') as f: - for line in f: - img_id, pic_path, boxes, labels, ori_width, ori_height = parse_line(line) - - objects = [] - for i in range(len(labels)): - x_min, y_min, x_max, y_max = boxes[i] - label = labels[i] - - if letterbox_resize: - resize_ratio = min(new_width / ori_width, new_height / ori_height) - - resize_w = int(resize_ratio * ori_width) - resize_h = int(resize_ratio * ori_height) - - dw = int((new_width - resize_w) / 2) - dh = int((new_height - resize_h) / 2) - - objects.append([x_min * resize_ratio + dw, - y_min * resize_ratio + dh, - x_max * resize_ratio + dw, - y_max * resize_ratio + dh, - label]) - else: - objects.append([x_min * new_width / ori_width, - y_min * new_height / ori_height, - x_max * new_width / ori_width, - y_max * new_height / ori_height, - label]) - gt_dict[img_id] = objects - return gt_dict - - -def gpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, nms_thresh=0.5): - """ - Perform NMS on GPU using TensorFlow. 
- - params: - boxes: tensor of shape [1, 10647, 4] # 10647=(13*13+26*26+52*52)*3, for input 416*416 image - scores: tensor of shape [1, 10647, num_classes], score=conf*prob - num_classes: total number of classes - max_boxes: integer, maximum number of predicted boxes you'd like, default is 50 - score_thresh: if [ highest class probability score < score_threshold] - then get rid of the corresponding box - nms_thresh: real value, "intersection over union" threshold used for NMS filtering - """ - - boxes_list, label_list, score_list = [], [], [] - max_boxes = tf.constant(max_boxes, dtype='int32') - - # since we do nms for single image, then reshape it - boxes = tf.reshape(boxes, [-1, 4]) # '-1' means we don't konw the exact number of boxes - score = tf.reshape(scores, [-1, num_classes]) - - # Step 1: Create a filtering mask based on "box_class_scores" by using "threshold". - mask = tf.greater_equal(score, tf.constant(score_thresh)) - # Step 2: Do non_max_suppression for each class - for i in range(num_classes): - # Step 3: Apply the mask to scores, boxes and pick them out - filter_boxes = tf.boolean_mask(boxes, mask[:,i]) - filter_score = tf.boolean_mask(score[:,i], mask[:,i]) - nms_indices = tf.image.non_max_suppression(boxes=filter_boxes, - scores=filter_score, - max_output_size=max_boxes, - iou_threshold=nms_thresh, name='nms_indices') - label_list.append(tf.ones_like(tf.gather(filter_score, nms_indices), 'int32')*i) - boxes_list.append(tf.gather(filter_boxes, nms_indices)) - score_list.append(tf.gather(filter_score, nms_indices)) - - boxes = tf.concat(boxes_list, axis=0) - score = tf.concat(score_list, axis=0) - label = tf.concat(label_list, axis=0) - - return boxes, score, label - - -def get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, image_ids, y_pred): - ''' - Given the y_pred of an input image, get the predicted bbox and label info. - return: - pred_content: 2d list. 
- ''' - image_id = image_ids[0] - - # keep the first dimension 1 - pred_boxes = y_pred[0][0:1] - pred_confs = y_pred[1][0:1] - pred_probs = y_pred[2][0:1] - - boxes, scores, labels = sess.run(gpu_nms_op, - feed_dict={pred_boxes_flag: pred_boxes, - pred_scores_flag: pred_confs * pred_probs}) - - pred_content = [] - for i in range(len(labels)): - x_min, y_min, x_max, y_max = boxes[i] - score = scores[i] - label = labels[i] - pred_content.append([image_id, x_min, y_min, x_max, y_max, score, label]) - - return pred_content - - -def voc_ap(rec, prec, use_07_metric=True): - """Compute VOC AP given precision and recall. If use_07_metric is true, uses - the VOC 07 11-point method (default:False). - """ - if use_07_metric: - # 11 point metric - ap = 0. - for t in np.arange(0., 1.1, 0.1): - if np.sum(rec >= t) == 0: - p = 0 - else: - p = np.max(prec[rec >= t]) - ap = ap + p / 11. - else: - # correct AP calculation - # first append sentinel values at the end - mrec = np.concatenate(([0.], rec, [1.])) - mpre = np.concatenate(([0.], prec, [0.])) - - # compute the precision envelope - for i in range(mpre.size - 1, 0, -1): - mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) - - # to calculate area under PR curve, look for points - # where X axis (recall) changes value - i = np.where(mrec[1:] != mrec[:-1])[0] - - # and sum (\Delta recall) * prec - ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) - return ap - - def shuffle_and_overwrite(file_name): content = open(file_name, 'r').readlines() random.shuffle(content) with open(file_name, 'w') as f: for line in content: - f.write(line) \ No newline at end of file + f.write(line) From 0e56977e2ad11827682fa1310895aa55504fb52f Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Wed, 21 Aug 2019 16:23:03 +0100 Subject: [PATCH 63/65] patching --- .../tensorflow_evaluation_utils.py | 421 ++++++++++++++++++ .../tensorflow_image_formatting_utils.py | 381 ++++++++++++++++ .../tensorflow_processing_utils.py | 127 ++++++ 
.../train_tensorflow_model.py | 6 +- 4 files changed, 933 insertions(+), 2 deletions(-) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py index e69de29..c7bac86 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_evaluation_utils.py @@ -0,0 +1,421 @@ +from __future__ import division, print_function + +import numpy as np +import sys +from collections import Counter + +PY_VERSION = sys.version_info[0] +iter_cnt = 0 + + +def voc_ap(rec, prec, use_07_metric=True): + """Compute VOC AP given precision and recall. If use_07_metric is true, uses + the VOC 07 11-point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0. + for t in np.arange(0., 1.1, 0.1): + if np.sum(rec >= t) == 0: + p = 0 + else: + p = np.max(prec[rec >= t]) + ap = ap + p / 11. + else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.], rec, [1.])) + mpre = np.concatenate(([0.], prec, [0.])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + + +def get_preds_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, image_ids, y_pred): + ''' + Given the y_pred of an input image, get the predicted bbox and label info. + return: + pred_content: 2d list. 
+ ''' + image_id = image_ids[0] + + # keep the first dimension 1 + pred_boxes = y_pred[0][0:1] + pred_confs = y_pred[1][0:1] + pred_probs = y_pred[2][0:1] + + boxes, scores, labels = sess.run(gpu_nms_op, + feed_dict={pred_boxes_flag: pred_boxes, + pred_scores_flag: pred_confs * pred_probs}) + + pred_content = [] + for i in range(len(labels)): + x_min, y_min, x_max, y_max = boxes[i] + score = scores[i] + label = labels[i] + pred_content.append([image_id, x_min, y_min, x_max, y_max, score, label]) + + return pred_content + + +def calc_iou(pred_boxes, true_boxes): + ''' + Maintain an efficient way to calculate the ios matrix using the numpy broadcast tricks. + shape_info: pred_boxes: [N, 4] + true_boxes: [V, 4] + return: IoU matrix: shape: [N, V] + ''' + + # [N, 1, 4] + pred_boxes = np.expand_dims(pred_boxes, -2) + # [1, V, 4] + true_boxes = np.expand_dims(true_boxes, 0) + + # [N, 1, 2] & [1, V, 2] ==> [N, V, 2] + intersect_mins = np.maximum(pred_boxes[..., :2], true_boxes[..., :2]) + intersect_maxs = np.minimum(pred_boxes[..., 2:], true_boxes[..., 2:]) + intersect_wh = np.maximum(intersect_maxs - intersect_mins, 0.) + + # shape: [N, V] + intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1] + # shape: [N, 1, 2] + pred_box_wh = pred_boxes[..., 2:] - pred_boxes[..., :2] + # shape: [N, 1] + pred_box_area = pred_box_wh[..., 0] * pred_box_wh[..., 1] + # [1, V, 2] + true_boxes_wh = true_boxes[..., 2:] - true_boxes[..., :2] + # [1, V] + true_boxes_area = true_boxes_wh[..., 0] * true_boxes_wh[..., 1] + + # shape: [N, V] + iou = intersect_area / (pred_box_area + true_boxes_area - intersect_area + 1e-10) + + return iou + + +def voc_eval(gt_dict, val_preds, classidx, iou_thres=0.5, use_07_metric=False): + ''' + Top level function that does the PASCAL VOC evaluation. 
+ ''' + # 1.obtain gt: extract all gt objects for this class + class_recs = {} + npos = 0 + for img_id in gt_dict: + R = [obj for obj in gt_dict[img_id] if obj[-1] == classidx] + bbox = np.array([x[:4] for x in R]) + det = [False] * len(R) + npos += len(R) + class_recs[img_id] = {'bbox': bbox, 'det': det} + + # 2. obtain pred results + pred = [x for x in val_preds if x[-1] == classidx] + img_ids = [x[0] for x in pred] + confidence = np.array([x[-2] for x in pred]) + BB = np.array([[x[1], x[2], x[3], x[4]] for x in pred]) + + # 3. sort by confidence + sorted_ind = np.argsort(-confidence) + try: + BB = BB[sorted_ind, :] + except: + print('no box, ignore') + return 1e-6, 1e-6, 0, 0, 0 + img_ids = [img_ids[x] for x in sorted_ind] + + # 4. mark TPs and FPs + nd = len(img_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + + for d in range(nd): + # all the gt info in some image + R = class_recs[img_ids[d]] + bb = BB[d, :] + ovmax = -np.Inf + BBGT = R['bbox'] + + if BBGT.size > 0: + # calc iou + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 2], bb[2]) + iymax = np.minimum(BBGT[:, 3], bb[3]) + iw = np.maximum(ixmax - ixmin + 1., 0.) + ih = np.maximum(iymax - iymin + 1., 0.) + inters = iw * ih + + # union + uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + (BBGT[:, 2] - BBGT[:, 0] + 1.) * ( + BBGT[:, 3] - BBGT[:, 1] + 1.) - inters) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > iou_thres: + # gt not matched yet + if not R['det'][jmax]: + tp[d] = 1. + R['det'][jmax] = 1 + else: + fp[d] = 1. + else: + fp[d] = 1. 
+ + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) + ap = voc_ap(rec, prec, use_07_metric) + + # return rec, prec, ap + return npos, nd, tp[-1] / float(npos), tp[-1] / float(nd), ap + + +def evaluate_on_gpu(sess, gpu_nms_op, pred_boxes_flag, pred_scores_flag, + y_pred, y_true, num_classes, iou_thresh=0.5, calc_now=True): + ''' + Given y_pred and y_true of a batch of data, get the recall and precision of the current batch. + This function will perform gpu operation on the GPU. + ''' + + num_images = y_true[0].shape[0] + true_labels_dict = {i: 0 for i in range(num_classes)} # {class: count} + pred_labels_dict = {i: 0 for i in range(num_classes)} + true_positive_dict = {i: 0 for i in range(num_classes)} + + for i in range(num_images): + true_labels_list, true_boxes_list = [], [] + for j in range(3): # three feature maps + # shape: [13, 13, 3, 80] + true_probs_temp = y_true[j][i][..., 5:-1] + # shape: [13, 13, 3, 4] (x_center, y_center, w, h) + true_boxes_temp = y_true[j][i][..., 0:4] + + # [13, 13, 3] + object_mask = true_probs_temp.sum(axis=-1) > 0 + + # [V, 80] V: Ground truth number of the current image + true_probs_temp = true_probs_temp[object_mask] + # [V, 4] + true_boxes_temp = true_boxes_temp[object_mask] + + # [V], labels, each from 0 to 79 + true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist() + # [V, 4] (x_center, y_center, w, h) + true_boxes_list += true_boxes_temp.tolist() + + if len(true_labels_list) != 0: + for cls, count in Counter(true_labels_list).items(): + true_labels_dict[cls] += count + + # [V, 4] (xmin, ymin, xmax, ymax) + true_boxes = np.array(true_boxes_list) + box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4] + true_boxes[:, 0:2] = box_centers - box_sizes / 2. 
+ true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes + + # [1, xxx, 4] + pred_boxes = y_pred[0][i:i + 1] + pred_confs = y_pred[1][i:i + 1] + pred_probs = y_pred[2][i:i + 1] + + # pred_boxes: [N, 4] + # pred_confs: [N] + # pred_labels: [N] + # N: Detected box number of the current image + pred_boxes, pred_confs, pred_labels = sess.run(gpu_nms_op, + feed_dict={pred_boxes_flag: pred_boxes, + pred_scores_flag: pred_confs * pred_probs}) + # len: N + pred_labels_list = [] if pred_labels is None else pred_labels.tolist() + if pred_labels_list == []: + continue + + # calc iou + # [N, V] + iou_matrix = calc_iou(pred_boxes, true_boxes) + # [N] + max_iou_idx = np.argmax(iou_matrix, axis=-1) + + correct_idx = [] + correct_conf = [] + for k in range(max_iou_idx.shape[0]): + pred_labels_dict[pred_labels_list[k]] += 1 + match_idx = max_iou_idx[k] # V level + if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]: + if match_idx not in correct_idx: + correct_idx.append(match_idx) + correct_conf.append(pred_confs[k]) + else: + same_idx = correct_idx.index(match_idx) + if pred_confs[k] > correct_conf[same_idx]: + correct_idx.pop(same_idx) + correct_conf.pop(same_idx) + correct_idx.append(match_idx) + correct_conf.append(pred_confs[k]) + + for t in correct_idx: + true_positive_dict[true_labels_list[t]] += 1 + + if calc_now: + # avoid divided by 0 + recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6) + precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6) + + return recall, precision + else: + return true_positive_dict, true_labels_dict, pred_labels_dict + + +def evaluate_on_cpu(y_pred, y_true, num_classes, calc_now=True, max_boxes=50, score_thresh=0.5, iou_thresh=0.5): + ''' + Given y_pred and y_true of a batch of data, get the recall and precision of the current batch. 
+ ''' + + num_images = y_true[0].shape[0] + true_labels_dict = {i: 0 for i in range(num_classes)} # {class: count} + pred_labels_dict = {i: 0 for i in range(num_classes)} + true_positive_dict = {i: 0 for i in range(num_classes)} + + for i in range(num_images): + true_labels_list, true_boxes_list = [], [] + for j in range(3): # three feature maps + # shape: [13, 13, 3, 80] + true_probs_temp = y_true[j][i][..., 5:-1] + # shape: [13, 13, 3, 4] (x_center, y_center, w, h) + true_boxes_temp = y_true[j][i][..., 0:4] + + # [13, 13, 3] + object_mask = true_probs_temp.sum(axis=-1) > 0 + + # [V, 3] V: Ground truth number of the current image + true_probs_temp = true_probs_temp[object_mask] + # [V, 4] + true_boxes_temp = true_boxes_temp[object_mask] + + # [V], labels + true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist() + # [V, 4] (x_center, y_center, w, h) + true_boxes_list += true_boxes_temp.tolist() + + if len(true_labels_list) != 0: + for cls, count in Counter(true_labels_list).items(): + true_labels_dict[cls] += count + + # [V, 4] (xmin, ymin, xmax, ymax) + true_boxes = np.array(true_boxes_list) + box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4] + true_boxes[:, 0:2] = box_centers - box_sizes / 2. 
+ true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes + + # [1, xxx, 4] + pred_boxes = y_pred[0][i:i + 1] + pred_confs = y_pred[1][i:i + 1] + pred_probs = y_pred[2][i:i + 1] + + # pred_boxes: [N, 4] + # pred_confs: [N] + # pred_labels: [N] + # N: Detected box number of the current image + pred_boxes, pred_confs, pred_labels = cpu_nms(pred_boxes, pred_confs * pred_probs, num_classes, + max_boxes=max_boxes, score_thresh=score_thresh, + iou_thresh=iou_thresh) + + # len: N + pred_labels_list = [] if pred_labels is None else pred_labels.tolist() + if pred_labels_list == []: + continue + + # calc iou + # [N, V] + iou_matrix = calc_iou(pred_boxes, true_boxes) + # [N] + max_iou_idx = np.argmax(iou_matrix, axis=-1) + + correct_idx = [] + correct_conf = [] + for k in range(max_iou_idx.shape[0]): + pred_labels_dict[pred_labels_list[k]] += 1 + match_idx = max_iou_idx[k] # V level + if iou_matrix[k, match_idx] > iou_thresh and true_labels_list[match_idx] == pred_labels_list[k]: + if match_idx not in correct_idx: + correct_idx.append(match_idx) + correct_conf.append(pred_confs[k]) + else: + same_idx = correct_idx.index(match_idx) + if pred_confs[k] > correct_conf[same_idx]: + correct_idx.pop(same_idx) + correct_conf.pop(same_idx) + correct_idx.append(match_idx) + correct_conf.append(pred_confs[k]) + + for t in correct_idx: + true_positive_dict[true_labels_list[t]] += 1 + + if calc_now: + # avoid divided by 0 + recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6) + precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6) + + return recall, precision + else: + return true_positive_dict, true_labels_dict, pred_labels_dict + + +gt_dict = {} # key: img_id, value: gt object list +def parse_gt_rec(gt_filename, target_img_size, letterbox_resize=True): + ''' + parse and re-organize the gt info. + return: + gt_dict: dict. Each key is a img_id, the value is the gt bboxes in the corresponding img. 
+ ''' + + global gt_dict + + if not gt_dict: + new_width, new_height = target_img_size + with open(gt_filename, 'r') as f: + for line in f: + img_id, pic_path, boxes, labels, ori_width, ori_height = parse_line(line) + + objects = [] + for i in range(len(labels)): + x_min, y_min, x_max, y_max = boxes[i] + label = labels[i] + + if letterbox_resize: + resize_ratio = min(new_width / ori_width, new_height / ori_height) + + resize_w = int(resize_ratio * ori_width) + resize_h = int(resize_ratio * ori_height) + + dw = int((new_width - resize_w) / 2) + dh = int((new_height - resize_h) / 2) + + objects.append([x_min * resize_ratio + dw, + y_min * resize_ratio + dh, + x_max * resize_ratio + dw, + y_max * resize_ratio + dh, + label]) + else: + objects.append([x_min * new_width / ori_width, + y_min * new_height / ori_height, + x_max * new_width / ori_width, + y_max * new_height / ori_height, + label]) + gt_dict[img_id] = objects + return gt_dict diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py index e69de29..2495c6f 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_image_formatting_utils.py @@ -0,0 +1,381 @@ +from __future__ import division, print_function + +import numpy as np +import sys +import cv2 +import random + +PY_VERSION = sys.version_info[0] +iter_cnt = 0 + + +def random_crop_with_constraints(bbox, size, min_scale=0.3, max_scale=1, + max_aspect_ratio=2, constraints=None, + max_trial=50): + """Crop an image randomly with bounding box constraints. + This data augmentation is used in training of + Single Shot Multibox Detector [#]_. More details can be found in + data augmentation section of the original paper. + .. 
[#] Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy,
+ Scott Reed, Cheng-Yang Fu, Alexander C. Berg.
+ SSD: Single Shot MultiBox Detector. ECCV 2016.
+ Parameters
+ ----------
+ bbox : numpy.ndarray
+ Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes.
+ The second axis represents attributes of the bounding box.
+ Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`,
+ we allow additional attributes other than coordinates, which stay intact
+ during bounding box transformations.
+ size : tuple
+ Tuple of length 2 of image shape as (width, height).
+ min_scale : float
+ The minimum ratio between a cropped region and the original image.
+ The default value is :obj:`0.3`.
+ max_scale : float
+ The maximum ratio between a cropped region and the original image.
+ The default value is :obj:`1`.
+ max_aspect_ratio : float
+ The maximum aspect ratio of cropped region.
+ The default value is :obj:`2`.
+ constraints : iterable of tuples
+ An iterable of constraints.
+ Each constraint should be :obj:`(min_iou, max_iou)` format.
+ It means no constraint if you set :obj:`min_iou` or :obj:`max_iou` to :obj:`None`.
+ If this argument defaults to :obj:`None`, :obj:`((0.1, None), (0.3, None),
+ (0.5, None), (0.7, None), (0.9, None), (None, 1))` will be used.
+ max_trial : int
+ Maximum number of trials for each constraint before exit no matter what.
+ Returns
+ -------
+ numpy.ndarray
+ Cropped bounding boxes with shape :obj:`(M, 4+)` where M <= N.
+ tuple
+ Tuple of length 4 as (x_offset, y_offset, new_width, new_height). 
+ """ + # default params in paper + if constraints is None: + constraints = ( + (0.1, None), + (0.3, None), + (0.5, None), + (0.7, None), + (0.9, None), + (None, 1), + ) + + w, h = size + + candidates = [(0, 0, w, h)] + for min_iou, max_iou in constraints: + min_iou = -np.inf if min_iou is None else min_iou + max_iou = np.inf if max_iou is None else max_iou + + for _ in range(max_trial): + scale = random.uniform(min_scale, max_scale) + aspect_ratio = random.uniform( + max(1 / max_aspect_ratio, scale * scale), + min(max_aspect_ratio, 1 / (scale * scale))) + crop_h = int(h * scale / np.sqrt(aspect_ratio)) + crop_w = int(w * scale * np.sqrt(aspect_ratio)) + + crop_t = random.randrange(h - crop_h) + crop_l = random.randrange(w - crop_w) + crop_bb = np.array((crop_l, crop_t, crop_l + crop_w, crop_t + crop_h)) + + if len(bbox) == 0: + top, bottom = crop_t, crop_t + crop_h + left, right = crop_l, crop_l + crop_w + return bbox, (left, top, right-left, bottom-top) + + iou = bbox_iou(bbox, crop_bb[np.newaxis]) + if min_iou <= iou.min() and iou.max() <= max_iou: + top, bottom = crop_t, crop_t + crop_h + left, right = crop_l, crop_l + crop_w + candidates.append((left, top, right-left, bottom-top)) + break + + # random select one + while candidates: + crop = candidates.pop(np.random.randint(0, len(candidates))) + new_bbox = bbox_crop(bbox, crop, allow_outside_center=False) + if new_bbox.size < 1: + continue + new_crop = (crop[0], crop[1], crop[2], crop[3]) + return new_bbox, new_crop + return bbox, (0, 0, w, h) + + +def random_color_distort(img, brightness_delta=32, hue_vari=18, sat_vari=0.5, val_vari=0.5): + ''' + randomly distort image color. Adjust brightness, hue, saturation, value. + param: + img: a BGR uint8 format OpenCV image. HWC format. 
+ ''' + + def random_hue(img_hsv, hue_vari, p=0.5): + if np.random.uniform(0, 1) > p: + hue_delta = np.random.randint(-hue_vari, hue_vari) + img_hsv[:, :, 0] = (img_hsv[:, :, 0] + hue_delta) % 180 + return img_hsv + + def random_saturation(img_hsv, sat_vari, p=0.5): + if np.random.uniform(0, 1) > p: + sat_mult = 1 + np.random.uniform(-sat_vari, sat_vari) + img_hsv[:, :, 1] *= sat_mult + return img_hsv + + def random_value(img_hsv, val_vari, p=0.5): + if np.random.uniform(0, 1) > p: + val_mult = 1 + np.random.uniform(-val_vari, val_vari) + img_hsv[:, :, 2] *= val_mult + return img_hsv + + def random_brightness(img, brightness_delta, p=0.5): + if np.random.uniform(0, 1) > p: + img = img.astype(np.float32) + brightness_delta = int(np.random.uniform(-brightness_delta, brightness_delta)) + img = img + brightness_delta + return np.clip(img, 0, 255) + + # brightness + img = random_brightness(img, brightness_delta) + img = img.astype(np.uint8) + + # color jitter + img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).astype(np.float32) + + if np.random.randint(0, 2): + img_hsv = random_value(img_hsv, val_vari) + img_hsv = random_saturation(img_hsv, sat_vari) + img_hsv = random_hue(img_hsv, hue_vari) + else: + img_hsv = random_saturation(img_hsv, sat_vari) + img_hsv = random_hue(img_hsv, hue_vari) + img_hsv = random_value(img_hsv, val_vari) + + img_hsv = np.clip(img_hsv, 0, 255) + img = cv2.cvtColor(img_hsv.astype(np.uint8), cv2.COLOR_HSV2BGR) + + return img + + +def letterbox_resize(img, new_width, new_height, interp=0): + ''' + Letterbox resize. keep the original aspect ratio in the resized image. 
+ ''' + ori_height, ori_width = img.shape[:2] + + resize_ratio = min(new_width / ori_width, new_height / ori_height) + + resize_w = int(resize_ratio * ori_width) + resize_h = int(resize_ratio * ori_height) + + img = cv2.resize(img, (resize_w, resize_h), interpolation=interp) + image_padded = np.full((new_height, new_width, 3), 128, np.uint8) + + dw = int((new_width - resize_w) / 2) + dh = int((new_height - resize_h) / 2) + + image_padded[dh: resize_h + dh, dw: resize_w + dw, :] = img + + return image_padded, resize_ratio, dw, dh + + +def resize_with_bbox(img, bbox, new_width, new_height, interp=0, letterbox=False): + ''' + Resize the image and correct the bbox accordingly. + ''' + + if letterbox: + image_padded, resize_ratio, dw, dh = letterbox_resize(img, new_width, new_height, interp) + + # xmin, xmax + bbox[:, [0, 2]] = bbox[:, [0, 2]] * resize_ratio + dw + # ymin, ymax + bbox[:, [1, 3]] = bbox[:, [1, 3]] * resize_ratio + dh + + return image_padded, bbox + else: + ori_height, ori_width = img.shape[:2] + + img = cv2.resize(img, (new_width, new_height), interpolation=interp) + + # xmin, xmax + bbox[:, [0, 2]] = bbox[:, [0, 2]] / ori_width * new_width + # ymin, ymax + bbox[:, [1, 3]] = bbox[:, [1, 3]] / ori_height * new_height + + return img, bbox + + +def random_flip(img, bbox, px=0, py=0): + ''' + Randomly flip the image and correct the bbox. 
+ param: + px: + the probability of horizontal flip + py: + the probability of vertical flip + ''' + height, width = img.shape[:2] + if np.random.uniform(0, 1) < px: + img = cv2.flip(img, 1) + xmax = width - bbox[:, 0] + xmin = width - bbox[:, 2] + bbox[:, 0] = xmin + bbox[:, 2] = xmax + + if np.random.uniform(0, 1) < py: + img = cv2.flip(img, 0) + ymax = height - bbox[:, 1] + ymin = height - bbox[:, 3] + bbox[:, 1] = ymin + bbox[:, 3] = ymax + return img, bbox + + +def random_expand(img, bbox, max_ratio=4, fill=0, keep_ratio=True): + ''' + Random expand original image with borders, this is identical to placing + the original image on a larger canvas. + param: + max_ratio : + Maximum ratio of the output image on both direction(vertical and horizontal) + fill : + The value(s) for padded borders. + keep_ratio : bool + If `True`, will keep output image the same aspect ratio as input. + ''' + h, w, c = img.shape + ratio_x = random.uniform(1, max_ratio) + if keep_ratio: + ratio_y = ratio_x + else: + ratio_y = random.uniform(1, max_ratio) + + oh, ow = int(h * ratio_y), int(w * ratio_x) + off_y = random.randint(0, oh - h) + off_x = random.randint(0, ow - w) + + dst = np.full(shape=(oh, ow, c), fill_value=fill, dtype=img.dtype) + + dst[off_y:off_y + h, off_x:off_x + w, :] = img + + # correct bbox + bbox[:, :2] += (off_x, off_y) + bbox[:, 2:4] += (off_x, off_y) + + return dst, bbox + + +def mix_up(img1, img2, bbox1, bbox2): + ''' + return: + mix_img: HWC format mix up image + mix_bbox: [N, 5] shape mix up bbox, i.e. `x_min, y_min, x_max, y_mix, mixup_weight`. 
+ ''' + height = max(img1.shape[0], img2.shape[0]) + width = max(img1.shape[1], img2.shape[1]) + + mix_img = np.zeros(shape=(height, width, 3), dtype='float32') + + # rand_num = np.random.random() + rand_num = np.random.beta(1.5, 1.5) + rand_num = max(0, min(1, rand_num)) + mix_img[:img1.shape[0], :img1.shape[1], :] = img1.astype('float32') * rand_num + mix_img[:img2.shape[0], :img2.shape[1], :] += img2.astype('float32') * (1. - rand_num) + + mix_img = mix_img.astype('uint8') + + # the last element of the 2nd dimention is the mix up weight + bbox1 = np.concatenate((bbox1, np.full(shape=(bbox1.shape[0], 1), fill_value=rand_num)), axis=-1) + bbox2 = np.concatenate((bbox2, np.full(shape=(bbox2.shape[0], 1), fill_value=1. - rand_num)), axis=-1) + mix_bbox = np.concatenate((bbox1, bbox2), axis=0) + + return mix_img, mix_bbox + + +def bbox_crop(bbox, crop_box=None, allow_outside_center=True): + """Crop bounding boxes according to slice area. + This method is mainly used with image cropping to ensure bonding boxes fit + within the cropped image. + Parameters + ---------- + bbox : numpy.ndarray + Numpy.ndarray with shape (N, 4+) where N is the number of bounding boxes. + The second axis represents attributes of the bounding box. + Specifically, these are :math:`(x_{min}, y_{min}, x_{max}, y_{max})`, + we allow additional attributes other than coordinates, which stay intact + during bounding box transformations. + crop_box : tuple + Tuple of length 4. :math:`(x_{min}, y_{min}, width, height)` + allow_outside_center : bool + If `False`, remove bounding boxes which have centers outside cropping area. + Returns + ------- + numpy.ndarray + Cropped bounding boxes with shape (M, 4+) where M <= N. 
+ """ + bbox = bbox.copy() + if crop_box is None: + return bbox + if not len(crop_box) == 4: + raise ValueError( + "Invalid crop_box parameter, requires length 4, given {}".format(str(crop_box))) + if sum([int(c is None) for c in crop_box]) == 4: + return bbox + + l, t, w, h = crop_box + + left = l if l else 0 + top = t if t else 0 + right = left + (w if w else np.inf) + bottom = top + (h if h else np.inf) + crop_bbox = np.array((left, top, right, bottom)) + + if allow_outside_center: + mask = np.ones(bbox.shape[0], dtype=bool) + else: + centers = (bbox[:, :2] + bbox[:, 2:4]) / 2 + mask = np.logical_and(crop_bbox[:2] <= centers, centers < crop_bbox[2:]).all(axis=1) + + # transform borders + bbox[:, :2] = np.maximum(bbox[:, :2], crop_bbox[:2]) + bbox[:, 2:4] = np.minimum(bbox[:, 2:4], crop_bbox[2:4]) + bbox[:, :2] -= crop_bbox[:2] + bbox[:, 2:4] -= crop_bbox[:2] + + mask = np.logical_and(mask, (bbox[:, :2] < bbox[:, 2:4]).all(axis=1)) + bbox = bbox[mask] + return bbox + + +def bbox_iou(bbox_a, bbox_b, offset=0): + """Calculate Intersection-Over-Union(IOU) of two bounding boxes. + Parameters + ---------- + bbox_a : numpy.ndarray + An ndarray with shape :math:`(N, 4)`. + bbox_b : numpy.ndarray + An ndarray with shape :math:`(M, 4)`. + offset : float or int, default is 0 + The ``offset`` is used to control the whether the width(or height) is computed as + (right - left + ``offset``). + Note that the offset must be 0 for normalized bboxes, whose ranges are in ``[0, 1]``. + Returns + ------- + numpy.ndarray + An ndarray with shape :math:`(N, M)` indicates IOU between each pairs of + bounding boxes in `bbox_a` and `bbox_b`. 
+ """ + if bbox_a.shape[1] < 4 or bbox_b.shape[1] < 4: + raise IndexError("Bounding boxes axis 1 must have at least length 4") + + tl = np.maximum(bbox_a[:, None, :2], bbox_b[:, :2]) + br = np.minimum(bbox_a[:, None, 2:4], bbox_b[:, 2:4]) + + area_i = np.prod(br - tl + offset, axis=2) * (tl < br).all(axis=2) + area_a = np.prod(bbox_a[:, 2:4] - bbox_a[:, :2] + offset, axis=1) + area_b = np.prod(bbox_b[:, 2:4] - bbox_b[:, :2] + offset, axis=1) + return area_i / (area_a[:, None] + area_b - area_i) diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py index e69de29..91b0825 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/tensorflow_processing_utils.py @@ -0,0 +1,127 @@ +from __future__ import division, print_function + +import numpy as np +import sys +import tensorflow as tf + +PY_VERSION = sys.version_info[0] +iter_cnt = 0 + + +def cpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, iou_thresh=0.5): + """ + Perform NMS on CPU. 
+ Arguments: + boxes: shape [1, 10647, 4] + scores: shape [1, 10647, num_classes] + """ + + boxes = boxes.reshape(-1, 4) + scores = scores.reshape(-1, num_classes) + # Picked bounding boxes + picked_boxes, picked_score, picked_label = [], [], [] + + for i in range(num_classes): + indices = np.where(scores[:,i] >= score_thresh) + filter_boxes = boxes[indices] + filter_scores = scores[:,i][indices] + if len(filter_boxes) == 0: + continue + # do non_max_suppression on the cpu + indices = py_nms(filter_boxes, filter_scores, + max_boxes=max_boxes, iou_thresh=iou_thresh) + picked_boxes.append(filter_boxes[indices]) + picked_score.append(filter_scores[indices]) + picked_label.append(np.ones(len(indices), dtype='int32')*i) + if len(picked_boxes) == 0: + return None, None, None + + boxes = np.concatenate(picked_boxes, axis=0) + score = np.concatenate(picked_score, axis=0) + label = np.concatenate(picked_label, axis=0) + + return boxes, score, label + + + +def py_nms(boxes, scores, max_boxes=50, iou_thresh=0.5): + """ + Pure Python NMS baseline. 
+ + Arguments: boxes: shape of [-1, 4], the value of '-1' means that dont know the + exact number of boxes + scores: shape of [-1,] + max_boxes: representing the maximum of boxes to be selected by non_max_suppression + iou_thresh: representing iou_threshold for deciding to keep boxes + """ + assert boxes.shape[1] == 4 and len(scores.shape) == 1 + + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + + areas = (x2 - x1) * (y2 - y1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1 + 1) + h = np.maximum(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= iou_thresh)[0] + order = order[inds + 1] + + return keep[:max_boxes] + + +def gpu_nms(boxes, scores, num_classes, max_boxes=50, score_thresh=0.5, nms_thresh=0.5): + """ + Perform NMS on GPU using TensorFlow. + + params: + boxes: tensor of shape [1, 10647, 4] # 10647=(13*13+26*26+52*52)*3, for input 416*416 image + scores: tensor of shape [1, 10647, num_classes], score=conf*prob + num_classes: total number of classes + max_boxes: integer, maximum number of predicted boxes you'd like, default is 50 + score_thresh: if [ highest class probability score < score_threshold] + then get rid of the corresponding box + nms_thresh: real value, "intersection over union" threshold used for NMS filtering + """ + + boxes_list, label_list, score_list = [], [], [] + max_boxes = tf.constant(max_boxes, dtype='int32') + + # since we do nms for single image, then reshape it + boxes = tf.reshape(boxes, [-1, 4]) # '-1' means we don't konw the exact number of boxes + score = tf.reshape(scores, [-1, num_classes]) + + # Step 1: Create a filtering mask based on "box_class_scores" by using "threshold". 
+ mask = tf.greater_equal(score, tf.constant(score_thresh)) + # Step 2: Do non_max_suppression for each class + for i in range(num_classes): + # Step 3: Apply the mask to scores, boxes and pick them out + filter_boxes = tf.boolean_mask(boxes, mask[:,i]) + filter_score = tf.boolean_mask(score[:,i], mask[:,i]) + nms_indices = tf.image.non_max_suppression(boxes=filter_boxes, + scores=filter_score, + max_output_size=max_boxes, + iou_threshold=nms_thresh, name='nms_indices') + label_list.append(tf.ones_like(tf.gather(filter_score, nms_indices), 'int32')*i) + boxes_list.append(tf.gather(filter_boxes, nms_indices)) + score_list.append(tf.gather(filter_score, nms_indices)) + + boxes = tf.concat(boxes_list, axis=0) + score = tf.concat(score_list, axis=0) + label = tf.concat(label_list, axis=0) + + return boxes, score, label \ No newline at end of file diff --git a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py index fc83981..f1a6073 100644 --- a/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py +++ b/src/traffic_analysis/d04_modelling/transfer_learning/train_tensorflow_model.py @@ -9,8 +9,10 @@ from tqdm import trange from traffic_analysis.d04_modelling.transfer_learning.tensorflow_training_utils import get_batch_data, \ - make_summary, config_learning_rate, config_optimizer, AverageMeter, \ - evaluate_on_gpu, get_preds_gpu, voc_eval, parse_gt_rec, gpu_nms + make_summary, config_learning_rate, config_optimizer, AverageMeter +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_evaluation_utils import evaluate_on_gpu, \ + get_preds_gpu, voc_eval, parse_gt_rec +from traffic_analysis.d04_modelling.transfer_learning.tensorflow_processing_utils import gpu_nms from traffic_analysis.d04_modelling.transfer_learning.tensorflow_model_loader import YoloV3 from 
traffic_analysis.d04_modelling.transfer_learning.convert_darknet_to_tensorflow import parse_anchors from traffic_analysis.d04_modelling.transfer_learning.tensorflow_detection_utils import read_class_names From 3ef194b239bfccd77593697f5c7ee2f00165c3ad Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Fri, 23 Aug 2019 10:57:33 +0100 Subject: [PATCH 64/65] added blank lines to make pep8 compliant --- src/run_transfer_learning.py | 2 +- src/traffic_analysis/d02_ref/ref_utils.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/run_transfer_learning.py b/src/run_transfer_learning.py index fd9f8af..68d66a3 100644 --- a/src/run_transfer_learning.py +++ b/src/run_transfer_learning.py @@ -30,4 +30,4 @@ train_params=train_params, train_file='train.txt', test_file='test.txt', - selected_labels=params['selected_labels']) \ No newline at end of file + selected_labels=params['selected_labels']) diff --git a/src/traffic_analysis/d02_ref/ref_utils.py b/src/traffic_analysis/d02_ref/ref_utils.py index f61ee99..5963baa 100644 --- a/src/traffic_analysis/d02_ref/ref_utils.py +++ b/src/traffic_analysis/d02_ref/ref_utils.py @@ -72,6 +72,7 @@ def get_names_of_folder_content_from_s3(bucket_name, prefix, s3_profile): return elapsed_time, files + def get_s3_video_path_from_xml_name(xml_file_name, s3_creds, paths): # Supports old and new naming conventions From 2d90db2ee274464d848a6ae565826d8c982b2434 Mon Sep 17 00:00:00 2001 From: jackattack1415 Date: Fri, 23 Aug 2019 10:57:53 +0100 Subject: [PATCH 65/65] removed hard coding --- src/traffic_analysis/d04_modelling/perform_detection_opencv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/traffic_analysis/d04_modelling/perform_detection_opencv.py b/src/traffic_analysis/d04_modelling/perform_detection_opencv.py index de6ddcb..2259237 100644 --- a/src/traffic_analysis/d04_modelling/perform_detection_opencv.py +++ b/src/traffic_analysis/d04_modelling/perform_detection_opencv.py @@ -67,7 +67,7 @@ def 
populate_labels(model_name: str, """ model_file_path = paths['local_detection_model'] - labels_file_path = os.path.join(model_file_path, 'yolov3', 'coco.names') + labels_file_path = os.path.join(model_file_path, model_name, 'coco.names') f = open(labels_file_path, 'r') labels = [line.strip() for line in f.readlines()]