-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathobtain_data.py
executable file
·338 lines (287 loc) · 12.3 KB
/
obtain_data.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
import argparse
import logging
import os
import pickle
from bs4 import BeautifulSoup
import pandas as pd
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.remote_connection import LOGGER as seleniumLogger
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from urllib3.connectionpool import log as urllibLogger
import common
# Avoid too much logging output from selenium and urllib.
seleniumLogger.setLevel(logging.WARNING)
urllibLogger.setLevel(logging.WARNING)
# "work" holds the log file and the pickle caches; create it before
# logging.basicConfig tries to open the log file inside it.
os.makedirs("work", exist_ok=True)
# Log everything (DEBUG and up) to work/example.log, appending across runs.
logging.basicConfig(
filename="work/example.log",
filemode="a",
format="%(asctime)s [%(levelname)s] %(message)s",
level=logging.DEBUG,
)
@common.with_session
def get_data_from_transfermarkt(session, url_all_teams, number_of_teams):
    """Scrape player market values for every team linked from *url_all_teams*.

    Args:
        session: HTTP session injected by the ``common.with_session`` decorator.
        url_all_teams: Transfermarkt overview page listing all teams.
        number_of_teams: Expected number of teams; used as a parsing sanity check.

    Returns:
        pandas.DataFrame with columns ``name_``, ``market_value`` and
        ``nationality`` (the latter actually holds the team id — kept under
        this name to avoid refactoring downstream code).

    Raises:
        ValueError: If the number of parsed team links differs from
            *number_of_teams* (layout change or parsing bug).
    """
    # Parse all participants and their id from the overview page.
    page = session.get(url_all_teams)
    page.raise_for_status()
    soup = BeautifulSoup(page.text, "html.parser")
    teams = {
        link["href"]
        for link in soup.find_all("a", href=True)
        if "/startseite/verein/" in link["href"]
    }
    # Explicit exception instead of `assert`: asserts are stripped under -O,
    # which would silently disable this sanity check.
    if len(teams) != number_of_teams:
        raise ValueError(
            f"Expected {number_of_teams} teams, parsed {len(teams)}: {teams}"
        )

    def parse_market_value(market_value_str) -> int:
        """Convert a string like "1,20 Mio. €" to an integer euro amount.

        Returns 0 (with a warning) for unparseable or non-euro values.
        """
        market_value_list = market_value_str.split(" ", 2)
        if len(market_value_list) != 3:
            logging.warning(f"Invalid market value: {market_value_str}")
            return 0
        if "€" not in market_value_list[2]:
            logging.warning(
                f"Invalid currency: {market_value_list[2]}. Only euro supported."
            )
            return 0
        # German decimal comma -> dot before float parsing.
        market_value = float(market_value_list[0].replace(",", "."))
        if "Tsd." in market_value_list[1]:
            market_value *= 10**3
        elif "Mio." in market_value_list[1]:
            market_value *= 10**6
        return int(market_value)

    # TODO: dict with duplicated player name or plain list?
    data = []
    for team_path in teams:
        team_url = "https://www.transfermarkt.de" + team_path
        logging.debug(f"{team_url=}")
        page = session.get(team_url)
        page.raise_for_status()
        soup = BeautifulSoup(page.text, "html.parser")
        # Squad rows alternate between the "even" and "odd" CSS classes.
        players_row = soup.find_all(attrs={"class": "even"}) + soup.find_all(
            attrs={"class": "odd"}
        )
        for player in players_row:
            info = player.find_all(class_="hauptlink")[0]
            market_value = parse_market_value(player.find_all(class_="rechts")[0].text)
            # "nationality" means "team" in this case, in order to refactor less.
            data.append(
                {
                    "name_": info.text.strip(),
                    "market_value": market_value,
                    "nationality": team_path.split("/")[1],
                }
            )
    logging.debug("Transfermarkt data obtained.")
    return pd.DataFrame(data)
def get_data_from_transfermarkt_em_2024():
    """Fetch the squads of all 24 teams participating in EURO 2024."""
    return get_data_from_transfermarkt(
        "https://www.transfermarkt.de/europameisterschaft-2024/teilnehmer/pokalwettbewerb/EM24/saison_id/2023",
        24,
    )
def get_data_from_transfermarkt_bundesliga_2023():
    """Fetch the squads of all 18 Bundesliga teams."""
    return get_data_from_transfermarkt(
        "https://www.transfermarkt.de/bundesliga/startseite/wettbewerb/L1",
        18,
    )
@common.with_driver
def get_available_players_fantasy(driver):
    """Scrape all selectable players from the UEFA fantasy team-creation page.

    Drives a browser (injected by ``common.with_driver``) through the
    virtualized player table: the visible rows are parsed, then the table's
    scrollbar is dragged a few pixels and parsing repeats until the scrollbar
    stops moving.

    Returns:
        set of ``common.Player`` (name, ingame_position, ingame_value).

    Raises:
        ValueError: For an in-game value with an unknown magnitude suffix.
        Exception: If consecutive parses share no players (scrolling skipped rows).

    NOTE(review): uses the Selenium 3 ``find_element_by_*`` API, which was
    removed in Selenium 4 — confirm the pinned selenium version.
    """
    def parse_ingame_value(ingame_value_str) -> int:
        # "5.5M" -> 5500000; only the "M" (million) suffix is supported.
        ingame_value = float(ingame_value_str[:-1])
        value_prefix = ingame_value_str[-1]
        if value_prefix == "M":
            ingame_value *= 10**6
        else:
            raise ValueError(f"Invalid prefix: {value_prefix}")
        return int(ingame_value)
    driver.get("https://gaming.uefa.com/de/uefaeuro2020fantasyfootball/create-team")
    # wait until page is fully loaded
    # NOTE(review): "PlayreFilters" looks like a typo but presumably matches
    # the site's actual element id — verify against the live page.
    player_filter = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.ID, "PlayreFilters"))
    )
    player_table = player_filter.find_element_by_css_selector("div[role='rowgroup']")
    # accept all cookies
    cookies_button = driver.find_element_by_id("onetrust-accept-btn-handler")
    cookies_button.click()
    def parse_playes_from_table(player_table):
        """Parse the players from the currently visible table section.
        The element stays the same, but the content changes.
        """
        player_data = set()
        players = player_table.find_elements_by_xpath("./div")
        for player in players:
            name_elem = player.find_element_by_class_name("si-plyr-name")
            value_elem = player.find_element_by_class_name("si-currency")
            # The currency cell holds two <span>s: currency symbol and amount.
            currency_elem, value_elem = value_elem.find_elements_by_css_selector("span")
            position_elem = player.find_element_by_class_name("si-pos")
            if currency_elem.text != "€":
                logging.warning(f"Invalid currency {currency_elem.text}. Skipping.")
                continue
            # Rows scrolled half out of view may render empty strings; skip them.
            if name_elem.text and position_elem.text and value_elem.text:
                player_data.add(
                    common.Player(
                        name=name_elem.text,
                        ingame_position=common.Position[position_elem.text],
                        ingame_value=parse_ingame_value(value_elem.text),
                    )
                )
            else:
                logging.warning(
                    f"Empty string: {name_elem.text}, {position_elem.text}, {value_elem.text}. Skipping."
                )
        return player_data
    player_data = set()
    wrapped_table = player_filter.find_element_by_class_name("si-list-wrap")
    # Custom scrollbar handle of the virtualized list (position-based xpath).
    slider_element = wrapped_table.find_element_by_xpath("div[1]/div/div[3]/div")
    last_slider_position = slider_element.location
    last_player_data = None
    while True:
        new_player_data = parse_playes_from_table(player_table)
        # Consecutive parses must overlap, otherwise the 7px drag below jumped
        # past some rows and players were missed.
        if last_player_data is not None and not (last_player_data & new_player_data):
            raise Exception("Scrolling failed. Common data is required.")
        last_player_data = new_player_data
        player_data.update(new_player_data)
        logging.debug(f"{len(player_data)} players parsed.")
        # Scroll down only a little bit to get the next players.
        # Couldn't find a better way for now.
        action = ActionChains(driver)
        action.click_and_hold(on_element=slider_element)
        action.move_by_offset(0, 7)
        action.perform()
        # Slider no longer moving means the bottom of the list was reached.
        if last_slider_position == slider_element.location:
            break
        last_slider_position = slider_element.location
    return player_data
def match_first_letter(current_player, match_options):
    """Disambiguate *match_options* via the first letter of the first name.

    An option survives when its first name starts with the same letter as
    *current_player*'s, with or without special characters normalized.
    Returns the single surviving option, or None when zero or several survive.
    """
    refined = [
        option
        for option in match_options
        if current_player.first_name()[0] == option.first_name()[0]
        or current_player.first_name(special_chars=False)[0]
        == option.first_name(special_chars=False)[0]
    ]
    if len(refined) == 1:
        logging.debug("Found matching player.")
        return refined[0]
    if not refined:
        logging.debug("Couldn't find matching player. Skipping.")
        return None
    logging.debug("Found too many matching players. Skipping.")
    return None
def merge_player_data(available_players, player_data_transfermarkt):
    """Try to find stats for each available player by matching names."""
    # Special cases, because of different naming.
    special_mappings = {
        "A. Zabolotny": "Anton Zabolotnyi",
        "Danilo": "Danilo Pereira",
        "E. Bardi": "Enis Bardhi",
        "M. Kerem Aktürkoglu": "Kerem Aktürkoglu",
        "N. Nikolić": "Nemanja Nikolics",
        "T. Alcántara": "Thiago",
    }
    complete_players = []
    missing_players = []
    for player in available_players:
        logging.debug(f"Look up {player.name}.")
        # Hardcoded fallback name for this player, if any (invariant per player).
        hardcoded_name = special_mappings.get(player.name, "")
        candidates = []
        for candidate in player_data_transfermarkt:
            if candidate.family_name() == player.family_name():
                logging.debug("Found in normal lookup.")
                candidates.append(candidate)
            elif candidate.family_name(special_chars=False) == player.family_name(
                special_chars=False
            ):
                logging.debug("Found in lookup with special chars replaced.")
                candidates.append(candidate)
            elif hardcoded_name == candidate.name:
                logging.debug("Found in lookup with hardcoded names.")
                candidates.append(candidate)
        # Resolve the candidate list to at most one match.
        if not candidates:
            logging.debug("Not found. Skipping.")
            matched_player = None
        elif len(candidates) == 1:
            logging.debug("Found matching player.")
            matched_player = candidates[0]
        else:
            logging.debug(
                f"Too many family name matches: {candidates}. Try to find players via first letter of the first name {player.first_name()}."
            )
            matched_player = match_first_letter(player, candidates)
        if matched_player is None:
            missing_players.append(player)
        else:
            complete_players.append(matched_player + player)
    logging.info(f"Complete_players: {len(complete_players)}")
    logging.info(
        f"Missing players: {len(missing_players)}: {[p.name for p in missing_players]}\n"
        + "The reason might be duplicated name, injury or dismissal."
    )
    return complete_players
def data_to_csv(data, file_):
    """Write merged player records to *file_* as CSV (header + one row per
    player, no trailing newline)."""
    header = "nationality,name_,cost_ingame,position,market_value"
    rows = [
        f"{p.nationality},{p.name},{p.ingame_value},{p.ingame_position.value},{p.market_value}"
        for p in data
    ]
    with open(file_, "w") as outfile:
        outfile.write("\n".join([header] + rows))
def get_data(filename, obtain_function, force):
    """Try to use cached data. If not possible, obtain data with function and write to cache.

    Args:
        filename: Path of the pickle cache file.
        obtain_function: Zero-argument callable producing the data to cache.
        force: When True, refresh the cache even if the file exists.

    Returns:
        The freshly obtained or unpickled cached data.
    """
    if not os.path.isfile(filename) or force:
        data = obtain_function()
        with open(filename, "wb") as outfile:
            pickle.dump(data, outfile)
    else:
        # Fix: the previous message was a placeholder-less f-string and never
        # reported which cache file was actually used.
        logging.info('Use cached data from "%s".', filename)
        with open(filename, "rb") as infile:
            data = pickle.load(infile)
    return data
def main():
    """Fetch kicker and Transfermarkt player data, merge on normalized names,
    and write the result to work/test.csv."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--force", action="store_true", help="Force refreshing of the cache."
    )
    args = parser.parse_args()

    player_data_transfermarkt = get_data(
        "work/transfermarkt.dat", get_data_from_transfermarkt_em_2024, args.force
    )

    player_data_kicker = pd.read_csv(
        "https://classic.kicker-libero.de/api/sportsdata/v1/players-details/se-k01072024.csv",
        delimiter=";",
    ).rename(
        columns={
            "Angezeigter Name": "name_",
            "Marktwert": "cost_ingame",
            "Position": "position",
        }
    )
    # Normalize kicker names: lowercase, then the shared id-normalization.
    player_data_kicker["name_"] = (
        player_data_kicker["name_"].str.lower().apply(common.idfy)
    )
    # special cases: kicker spellings that differ from transfermarkt's
    for kicker_spelling, transfermarkt_spelling in {
        "dion drena beljo": "dion beljo",
        "eric junior dina ebimbe": "junior dina ebimbe",
        "jean-manuel mbom": "jean manuel mbom",
        "kouadio kone": "manu kone",
        "omar haktab traore": "omar traore",
        "rafael santos borre": "rafael borre",
    }.items():
        player_data_kicker["name_"] = player_data_kicker["name_"].str.replace(
            kicker_spelling, transfermarkt_spelling, regex=False
        )

    player_data_transfermarkt["name_"] = (
        player_data_transfermarkt["name_"]
        .apply(common.idfy)
        .str.replace("mateu morey bauzà", "mateu morey", regex=False)
    )

    # use how="outer" for debugging
    player_data = player_data_kicker.merge(
        player_data_transfermarkt, on="name_", how="inner"
    )
    player_data.to_csv("work/test.csv", index=False)
# Run the full pipeline only when executed as a script, not on import.
if __name__ == "__main__":
    main()