13
13
import requests
14
14
from tzlocal import get_localzone
15
15
16
- from config import GPX_FOLDER , JSON_FILE , SQL_FILE , run_map , start_point , TCX_FOLDER , UTC_TIMEZONE
16
+ from config import (
17
+ GPX_FOLDER ,
18
+ JSON_FILE ,
19
+ SQL_FILE ,
20
+ run_map ,
21
+ start_point ,
22
+ TCX_FOLDER ,
23
+ UTC_TIMEZONE ,
24
+ )
17
25
from generator import Generator
18
26
from utils import adjust_time
19
27
20
28
# OAuth token-refresh endpoint for the HeyTap (OPPO) open sport API.
TOKEN_REFRESH_URL = "https://sport.health.heytapmobi.com/open/v1/oauth/token"

# Default headers for all OPPO API requests.  The desktop Firefox UA is
# presumably what the upstream service expects from API clients — keep as-is.
OPPO_HEADERS = {
    "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Content-Type": "application/json",
    "Accept": "application/json",
}
26
34
27
35
# Query brief version of sports records
@@ -102,13 +110,13 @@ def get_access_token(session, client_id, client_secret, refresh_token):
102
110
headers = {
103
111
"User-Agent" : "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0" ,
104
112
"Content-Type" : "application/json" ,
105
- "Accept" : "application/json"
113
+ "Accept" : "application/json" ,
106
114
}
107
115
data = {
108
116
"clientId" : client_id ,
109
117
"clientSecret" : client_secret ,
110
118
"refreshToken" : refresh_token ,
111
- "grantType" : "refreshToken"
119
+ "grantType" : "refreshToken" ,
112
120
}
113
121
r = session .post (TOKEN_REFRESH_URL , headers = headers , json = data )
114
122
if r .ok :
@@ -129,29 +137,33 @@ def get_to_download_runs_ranges(session, sync_months, headers, start_timestamp):
129
137
current_time = current_time + timedelta (days = - 30 )
130
138
temp_start = int (current_time .timestamp () * 1000 )
131
139
sync_months = sync_months - 1
132
- result .extend (parse_brief_sport_data (session , headers , temp_start , temp_end ))
140
+ result .extend (
141
+ parse_brief_sport_data (session , headers , temp_start , temp_end )
142
+ )
133
143
else :
134
144
while start_datatime < current_time :
135
145
temp_start = int (start_datatime .timestamp () * 1000 )
136
146
start_datatime = start_datatime + timedelta (days = 30 )
137
147
temp_end = int (start_datatime .timestamp () * 1000 )
138
- result .extend (parse_brief_sport_data (session , headers , temp_start , temp_end ))
148
+ result .extend (
149
+ parse_brief_sport_data (session , headers , temp_start , temp_end )
150
+ )
139
151
return result
140
152
141
153
142
154
def parse_brief_sport_data (session , headers , temp_start , temp_end ):
143
155
result = []
144
156
r = session .get (
145
- BRIEF_SPORT_DATA_API .format (
146
- end_time = temp_end ,
147
- start_time = temp_start
148
- ),
149
- headers = headers
157
+ BRIEF_SPORT_DATA_API .format (end_time = temp_end , start_time = temp_start ),
158
+ headers = headers ,
150
159
)
151
160
if r .ok :
152
161
sport_logs = r .json ()["body" ]
153
162
for i in sport_logs :
154
- if i ["sportMode" ] in AVAILABLE_INDOOR_SPORT_MODE or i ["sportMode" ] in AVAILABLE_OUTDOOR_SPORT_MODE :
163
+ if (
164
+ i ["sportMode" ] in AVAILABLE_INDOOR_SPORT_MODE
165
+ or i ["sportMode" ] in AVAILABLE_OUTDOOR_SPORT_MODE
166
+ ):
155
167
result .append ((i ["startTime" ], i ["endTime" ]))
156
168
print (f"sync record: start_time: " + str (i ["startTime" ]))
157
169
time .sleep (1 ) # spider rule
@@ -160,11 +172,7 @@ def parse_brief_sport_data(session, headers, temp_start, temp_end):
160
172
161
173
def get_single_run_data (session , headers , start , end ):
162
174
r = session .get (
163
- DETAILED_SPORT_DATA_API .format (
164
- end_time = end ,
165
- start_time = start
166
- ),
167
- headers = headers
175
+ DETAILED_SPORT_DATA_API .format (end_time = end , start_time = start ), headers = headers
168
176
)
169
177
if r .ok :
170
178
return r .json ()
@@ -207,7 +215,9 @@ def parse_raw_data_to_name_tuple(sport_data, with_gpx, with_tcx):
207
215
else :
208
216
print (f"ID { oppo_id } no gps data" )
209
217
210
- gps_data = [(item ["latitude" ], item ["longitude" ]) for item in other_data ["gpsPoint" ]]
218
+ gps_data = [
219
+ (item ["latitude" ], item ["longitude" ]) for item in other_data ["gpsPoint" ]
220
+ ]
211
221
polyline_str = polyline .encode (gps_data ) if gps_data else ""
212
222
start_latlng = start_point (* gps_data [0 ]) if gps_data else None
213
223
start_date = datetime .utcfromtimestamp (start_time / 1000 )
@@ -257,10 +267,13 @@ def get_all_oppo_tracks(
257
267
s = requests .Session ()
258
268
s , headers = get_access_token (s , client_id , client_secret , refresh_token )
259
269
260
- last_timestamp = 0 if (last_track_date == 0 ) \
270
+ last_timestamp = (
271
+ 0
272
+ if (last_track_date == 0 )
261
273
else int (
262
- datetime .timestamp (
263
- datetime .strptime (last_track_date , "%Y-%m-%d %H:%M:%S" )) * 1000
274
+ datetime .timestamp (datetime .strptime (last_track_date , "%Y-%m-%d %H:%M:%S" ))
275
+ * 1000
276
+ )
264
277
)
265
278
266
279
runs = get_to_download_runs_ranges (s , sync_months , headers , last_timestamp + 1000 )
@@ -270,14 +283,17 @@ def get_all_oppo_tracks(
270
283
print (f"parsing oppo id { str (start )} -{ str (end )} " )
271
284
try :
272
285
run_data = get_single_run_data (s , headers , start , end )
273
- track = parse_raw_data_to_name_tuple (run_data , with_download_gpx , with_download_tcx )
286
+ track = parse_raw_data_to_name_tuple (
287
+ run_data , with_download_gpx , with_download_tcx
288
+ )
274
289
tracks .append (track )
275
290
except Exception as e :
276
291
print (f"Something wrong paring keep id { str (start )} -{ str (end )} " + str (e ))
277
292
return tracks
278
293
279
294
280
def switch(v):
    """Emulate a C-style ``switch`` over value *v*.

    Yields exactly one matcher callable; ``matcher(*candidates)`` is True
    when *v* is among *candidates*.  Intended usage::

        for case in switch(mode):
            if case("a", "b"):
                ...
    """
    def _matches(*candidates):
        return v in candidates

    yield _matches
281
297
282
298
283
299
def map_oppo_fit_type_to_gpx_type (oppo_type ):
@@ -344,7 +360,9 @@ def parse_points_to_gpx(sport_data, points_dict_list):
344
360
cad = p .get ("cad" )
345
361
if hr is not None or cad is not None :
346
362
hr_str = f"""<gpxtpx:hr>{ hr } </gpxtpx:hr>""" if hr is not None else ""
347
- cad_str = f"""<gpxtpx:cad>{ p ["cad" ]} </gpxtpx:cad>""" if cad is not None else ""
363
+ cad_str = (
364
+ f"""<gpxtpx:cad>{ p ["cad" ]} </gpxtpx:cad>""" if cad is not None else ""
365
+ )
348
366
gpx_extension = ET .fromstring (
349
367
f"""<gpxtpx:TrackPointExtension xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1">
350
368
{ hr_str }
@@ -433,7 +451,9 @@ def parse_points_to_tcx(sport_data, points_dict_list):
433
451
# local time
434
452
start_time = sport_data ["startTime" ]
435
453
start_date = datetime .utcfromtimestamp (start_time / 1000 )
436
- fit_start_time = datetime .strftime (adjust_time (start_date , UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ" )
454
+ fit_start_time = datetime .strftime (
455
+ adjust_time (start_date , UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ"
456
+ )
437
457
438
458
# Root node
439
459
training_center_database = ET .Element (
@@ -463,12 +483,7 @@ def parse_points_to_tcx(sport_data, points_dict_list):
463
483
activity_id .text = fit_start_time # Codoon use start_time as ID
464
484
activity .append (activity_id )
465
485
# Creator
466
- activity_creator = ET .Element (
467
- "Creator" ,
468
- {
469
- "xsi:type" : "Device_t"
470
- }
471
- )
486
+ activity_creator = ET .Element ("Creator" , {"xsi:type" : "Device_t" })
472
487
activity .append (activity_creator )
473
488
# Name
474
489
activity_creator_name = ET .Element ("Name" )
@@ -494,24 +509,34 @@ def parse_points_to_tcx(sport_data, points_dict_list):
494
509
break
495
510
496
511
if idx + 1 != len (points_dict_list ):
497
- if item ["distance" ] < target_distance <= points_dict_list [idx + 1 ]["distance" ]:
512
+ if (
513
+ item ["distance" ]
514
+ < target_distance
515
+ <= points_dict_list [idx + 1 ]["distance" ]
516
+ ):
498
517
lap_split_indexes .append (idx )
499
518
500
519
if len (lap_split_indexes ) == 1 :
501
520
points_dict_list_chunks = [points_dict_list ]
502
521
else :
503
522
for idx , item in enumerate (lap_split_indexes ):
504
523
if idx + 1 == len (lap_split_indexes ):
505
- points_dict_list_chunks .append (points_dict_list [item : len (points_dict_list ) - 1 ])
524
+ points_dict_list_chunks .append (
525
+ points_dict_list [item : len (points_dict_list ) - 1 ]
526
+ )
506
527
else :
507
- points_dict_list_chunks .append (points_dict_list [item : lap_split_indexes [idx + 1 ]])
528
+ points_dict_list_chunks .append (
529
+ points_dict_list [item : lap_split_indexes [idx + 1 ]]
530
+ )
508
531
509
532
current_distance = 0
510
533
current_time = start_date
511
534
512
535
for item in points_dict_list_chunks :
513
536
# Lap
514
- lap_start_time = datetime .strftime (adjust_time (item [0 ]["time" ], UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ" )
537
+ lap_start_time = datetime .strftime (
538
+ adjust_time (item [0 ]["time" ], UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ"
539
+ )
515
540
activity_lap = ET .Element ("Lap" , {"StartTime" : lap_start_time })
516
541
activity .append (activity_lap )
517
542
@@ -556,7 +581,9 @@ def parse_points_to_tcx(sport_data, points_dict_list):
556
581
tp = ET .Element ("Trackpoint" )
557
582
track .append (tp )
558
583
# Time
559
- time_stamp = datetime .strftime (adjust_time (p ["time" ], UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ" )
584
+ time_stamp = datetime .strftime (
585
+ adjust_time (p ["time" ], UTC_TIMEZONE ), "%Y-%m-%dT%H:%M:%SZ"
586
+ )
560
587
time_label = ET .Element ("Time" )
561
588
time_label .text = time_stamp
562
589
@@ -595,7 +622,9 @@ def parse_points_to_tcx(sport_data, points_dict_list):
595
622
longi .text = str (p ["longitude" ])
596
623
position .append (longi )
597
624
# Extensions
598
- if p .get ("speed" ) is not None or (p .get ("cad" ) is not None and sports_type == "Running" ):
625
+ if p .get ("speed" ) is not None or (
626
+ p .get ("cad" ) is not None and sports_type == "Running"
627
+ ):
599
628
extensions = ET .Element ("Extensions" )
600
629
tp .append (extensions )
601
630
tpx = ET .Element ("ns3:TPX" )
@@ -611,12 +640,7 @@ def parse_points_to_tcx(sport_data, points_dict_list):
611
640
cad .text = str (round (p ["cad" ] / 2 ))
612
641
tpx .append (cad )
613
642
# Author
614
- author = ET .Element (
615
- "Author" ,
616
- {
617
- "xsi:type" : "Application_t"
618
- }
619
- )
643
+ author = ET .Element ("Author" , {"xsi:type" : "Application_t" })
620
644
training_center_database .append (author )
621
645
author_name = ET .Element ("Name" )
622
646
author_name .text = "Connect Api"
@@ -659,7 +683,7 @@ def run_oppo_sync(
659
683
sync_months ,
660
684
old_tracks_dates [0 ] if old_tracks_dates else 0 ,
661
685
with_download_gpx ,
662
- with_download_tcx
686
+ with_download_tcx ,
663
687
)
664
688
generator .sync_from_app (new_tracks )
665
689
@@ -690,7 +714,7 @@ def run_oppo_sync(
690
714
type = int ,
691
715
default = 6 ,
692
716
dest = "sync_months" ,
693
- help = "oppo has limited the data retrieve, so the default months we can sync is 6."
717
+ help = "oppo has limited the data retrieve, so the default months we can sync is 6." ,
694
718
)
695
719
options = parser .parse_args ()
696
720
run_oppo_sync (
@@ -699,5 +723,5 @@ def run_oppo_sync(
699
723
options .refresh_token ,
700
724
options .sync_months ,
701
725
options .with_gpx ,
702
- options .with_tcx
726
+ options .with_tcx ,
703
727
)
0 commit comments