
Commit 710838f
Merge pull request #77 from grrttedwards/fix/76/url-pattern-relaxation
Fix/76/url pattern relaxation
2 parents: a829f21 + f333bf1

File tree: 7 files changed (+36, -33 lines)

changelog.txt
Lines changed: 3 additions & 0 deletions

@@ -1,5 +1,8 @@
 Changelog

+* 9/29/2019 - v1.2.1
+  Relaxed the pattern match for most URLs which may not be prefixed with "www.".
+
 * 9/28/2019 - v1.2.0
   Integrated cfscrape as an anti-measure for Cloudflare bot-detection, for Curse and WoWAce. Node.js is now a requirement for Curse-based sites.
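In practice the relaxation means the substring check in AbstractSite.handles() (see the diff below) now also matches URLs without the "www." prefix. A minimal standalone sketch of that behaviour, using the new Curse prefixes and a hypothetical addon slug:

supported_prefixes = [
    'https://www.curseforge.com/wow/addons/',
    'https://curseforge.com/wow/addons/',   # newly added, no "www."
]
url = 'https://curseforge.com/wow/addons/some-addon'

# handles() checks whether any supported prefix occurs in the URL
assert any(prefix in url for prefix in supported_prefixes)

# before v1.2.1 only the "www." form was listed, so the same URL was rejected
assert not any(prefix in url for prefix in ['https://www.curseforge.com/wow/addons/'])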

updater/site/abstract_site.py
Lines changed: 9 additions & 6 deletions

@@ -1,5 +1,7 @@
 from abc import ABC, abstractmethod

+import requests
+
 from updater.site.enum import GameVersion


@@ -8,8 +10,10 @@ class SiteError(Exception):


 class AbstractSite(ABC):
+    # each implementation should declare a static _URLS list of supported URLs
+    _URLS: [str] = None
     # each implementation should create a static session for itself
-    session = None
+    session: requests.Session = None

     def __init__(self, url: str, game_version: GameVersion):
         self.url = url
@@ -20,12 +24,11 @@ def handles(cls, url: str) -> bool:
         return any(supported_url in url for supported_url in cls.get_supported_urls())

     @classmethod
-    @abstractmethod
     def get_supported_urls(cls) -> [str]:
-        # ABC for some reason won't enforce implementing this, perhaps
-        # because it only checks when the class is instantiated?
-        raise TypeError(f"Can't instantiate abstract class {cls.__name__}"
-                        " with abstract methods get_supported_urls")
+        if not cls._URLS:
+            raise NotImplementedError(f"Can't instantiate class {cls.__name__}"
+                                      " without list of supported URLs cls._URLS")
+        return cls._URLS

     @abstractmethod
     def find_zip_url(self) -> str:
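A standalone sketch of the new base-class contract (GoodSite and ForgetfulSite are hypothetical, not part of the project): get_supported_urls() is no longer abstract; it returns the subclass's _URLS and raises NotImplementedError when a subclass forgets to declare the list.

class AbstractSiteSketch:
    # mirrors the base-class behaviour introduced above
    _URLS: [str] = None

    @classmethod
    def get_supported_urls(cls) -> [str]:
        if not cls._URLS:
            raise NotImplementedError(f"Can't instantiate class {cls.__name__}"
                                      " without list of supported URLs cls._URLS")
        return cls._URLS

    @classmethod
    def handles(cls, url: str) -> bool:
        return any(supported_url in url for supported_url in cls.get_supported_urls())


class GoodSite(AbstractSiteSketch):
    _URLS = ['https://example.com/addons/']  # hypothetical addon site


class ForgetfulSite(AbstractSiteSketch):
    pass  # declares no _URLS


assert GoodSite.handles('https://example.com/addons/some-addon')
try:
    ForgetfulSite.get_supported_urls()
except NotImplementedError as error:
    print(error)  # the clearer failure mode that replaces the old TypeError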

updater/site/curse.py
Lines changed: 7 additions & 5 deletions

@@ -7,20 +7,22 @@


 class Curse(AbstractSite):
-    _URL = 'https://www.curseforge.com/wow/addons/'
     _OLD_URL = 'https://mods.curse.com/addons/wow/'
     _OLD_PROJECT_URL = 'https://wow.curseforge.com/projects/'

+    _URLS = [
+        'https://www.curseforge.com/wow/addons/',
+        'https://curseforge.com/wow/addons/',
+        _OLD_URL,
+        _OLD_PROJECT_URL
+    ]
+
     session = cfscrape.create_scraper("https://www.curseforge.com/")

     def __init__(self, url: str, game_version: GameVersion):
         url = Curse._convert_old_curse_urls(url)
         super().__init__(url, game_version)

-    @classmethod
-    def get_supported_urls(cls):
-        return [cls._OLD_URL, cls._OLD_PROJECT_URL, cls._URL]
-
     def find_zip_url(self):
         try:
             page = Curse.session.get(self.url)
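A hedged usage sketch, assuming the project and its dependencies (cfscrape, Node.js) are installed and using a hypothetical addon slug: the legacy Curse URLs stay supported because _OLD_URL and _OLD_PROJECT_URL are folded into _URLS instead of being returned by an overridden get_supported_urls().

from updater.site.curse import Curse

# all of these match via the inherited AbstractSite.handles() substring check
assert Curse.handles('https://www.curseforge.com/wow/addons/some-addon')
assert Curse.handles('https://curseforge.com/wow/addons/some-addon')    # no "www." required
assert Curse.handles('https://mods.curse.com/addons/wow/some-addon')    # _OLD_URL
assert Curse.handles('https://wow.curseforge.com/projects/some-addon')  # _OLD_PROJECT_URL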

updater/site/github.py
Lines changed: 4 additions & 5 deletions

@@ -7,7 +7,10 @@


 class GitHub(AbstractSite):
-    _URL = 'https://github.com/'
+    _URLS = [
+        'https://www.github.com/',
+        'https://github.com/'
+    ]

     session = requests.session()

@@ -16,10 +19,6 @@ def __init__(self, url: str):
         url = (url + '/tree/master')
         super().__init__(url, GameVersion.agnostic)

-    @classmethod
-    def get_supported_urls(cls):
-        return [cls._URL]
-
     def find_zip_url(self):
         return self.url.replace('/tree/', '/archive/', 1) + '.zip'
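For reference, a standalone illustration of the zip-URL derivation above (the repository path is hypothetical):

tree_url = 'https://github.com/author/some-addon/tree/master'  # the form produced after __init__ appends '/tree/master'
zip_url = tree_url.replace('/tree/', '/archive/', 1) + '.zip'
assert zip_url == 'https://github.com/author/some-addon/archive/master.zip'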

updater/site/tukui.py
Lines changed: 5 additions & 6 deletions

@@ -6,18 +6,17 @@


 class Tukui(AbstractSite):
-    _URL = 'https://git.tukui.org/elvui/'
-    latest_version = None
+    _URLS = [
+        'https://git.tukui.org/elvui/'
+    ]

     session = requests.session()

+    latest_version = None
+
     def __init__(self, url: str):
         super().__init__(url, GameVersion.agnostic)

-    @classmethod
-    def get_supported_urls(cls) -> [str]:
-        return [cls._URL]
-
     def find_zip_url(self):
         version = self.get_latest_version()
         # like https://git.tukui.org/elvui/elvui/-/archive/v11.21/elvui-v11.21.zip

updater/site/wowace.py
Lines changed: 4 additions & 5 deletions

@@ -7,7 +7,10 @@


 class WoWAce(AbstractSite):
-    _URL = 'https://www.wowace.com/projects/'
+    _URLS = [
+        'https://www.wowace.com/projects/',
+        'https://wowace.com/projects/'
+    ]

     session = cfscrape.create_scraper()

@@ -16,10 +19,6 @@ def __init__(self, url: str, game_version: GameVersion):
             raise NotImplementedError("Updating classic addons are not yet supported for WoWAce.")
         super().__init__(url, game_version)

-    @classmethod
-    def get_supported_urls(cls):
-        return [cls._URL]
-
     def find_zip_url(self):
         return self.url + '/files/latest'
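Likewise, a standalone illustration of the latest-file URL built above (the project slug is hypothetical):

project_url = 'https://wowace.com/projects/some-addon'
latest_zip = project_url + '/files/latest'
assert latest_zip == 'https://wowace.com/projects/some-addon/files/latest'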

updater/site/wowinterface.py
Lines changed: 4 additions & 6 deletions

@@ -7,17 +7,15 @@


 class WoWInterface(AbstractSite):
-    _URL = 'https://www.wowinterface.com/downloads/'
-
+    _URLS = [
+        'https://www.wowinterface.com/downloads',
+        'https://wowinterface.com/downloads'
+    ]
     session = requests.session()

     def __init__(self, url: str, game_version: GameVersion):
         super().__init__(url, game_version)

-    @classmethod
-    def get_supported_urls(cls) -> [str]:
-        return [cls._URL]
-
     def find_zip_url(self):
         downloadpage = self.url.replace('info', 'download')
         try:
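And a standalone illustration of the download-page derivation above (the info-page URL is a hypothetical example of the usual WoWInterface form):

info_url = 'https://www.wowinterface.com/downloads/info12345-SomeAddon.html'
download_url = info_url.replace('info', 'download')
assert download_url == 'https://www.wowinterface.com/downloads/download12345-SomeAddon.html'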
