diff --git a/proxy_scraper_checker/__init__.py b/proxy_scraper_checker/__init__.py
index 1a90f32ef..529e524c7 100644
--- a/proxy_scraper_checker/__init__.py
+++ b/proxy_scraper_checker/__init__.py
@@ -2,10 +2,9 @@
 
 import os as _os
 
-from .typing_compat import Any as _Any
-
 # Monkeypatch os.link to make aiofiles work on Termux
 if not hasattr(_os, "link"):
+    from .typing_compat import Any as _Any
 
     def _link(*args: _Any, **kwargs: _Any) -> None:  # noqa: ARG001
         raise RuntimeError
diff --git a/proxy_scraper_checker/http.py b/proxy_scraper_checker/http.py
index 157fed379..e2414b4e6 100644
--- a/proxy_scraper_checker/http.py
+++ b/proxy_scraper_checker/http.py
@@ -9,6 +9,11 @@
 
 from .utils import bytes_decode
 
+HEADERS: MappingProxyType[str, str] = MappingProxyType({
+    hdrs.USER_AGENT: (
+        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36"  # noqa: E501
+    )
+})
 SSL_CONTEXT = ssl.create_default_context(cafile=certifi.where())
 
 
@@ -16,11 +21,8 @@ class NoCharsetHeaderError(Exception):
     pass
 
 
-HEADERS: MappingProxyType[str, str] = MappingProxyType({
-    hdrs.USER_AGENT: (
-        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36"  # noqa: E501
-    )
-})
+def fallback_charset_resolver(r: ClientResponse, b: bytes) -> str:  # noqa: ARG001
+    raise NoCharsetHeaderError
 
 
 @lru_cache(None)
@@ -33,7 +35,3 @@ def get_response_text(*, response: ClientResponse, content: bytes) -> str:
         return content.decode(response.get_encoding())
     except (NoCharsetHeaderError, UnicodeDecodeError):
         return bytes_decode(content)
-
-
-def fallback_charset_resolver(r: ClientResponse, b: bytes) -> str:  # noqa: ARG001
-    raise NoCharsetHeaderError
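
For context, a minimal usage sketch (not part of the diff) of how the pieces touched in http.py fit together: HEADERS, SSL_CONTEXT, and fallback_charset_resolver are passed when constructing an aiohttp ClientSession, and get_response_text() decodes the body, falling back to charset detection when the server declares none. This assumes aiohttp >= 3.8.6, which added the fallback_charset_resolver parameter on ClientSession; the URL is a placeholder.

# Illustrative sketch only, not part of the diff. Assumes
# aiohttp >= 3.8.6 (fallback_charset_resolver parameter) and
# uses a placeholder URL.
import asyncio

import aiohttp

from proxy_scraper_checker.http import (
    HEADERS,
    SSL_CONTEXT,
    fallback_charset_resolver,
    get_response_text,
)


async def main() -> None:
    async with aiohttp.ClientSession(
        headers=HEADERS,
        connector=aiohttp.TCPConnector(ssl=SSL_CONTEXT),
        fallback_charset_resolver=fallback_charset_resolver,
    ) as session, session.get("https://example.com") as response:
        content = await response.read()
        # get_response_text() first tries the charset declared by the
        # server; when none is declared, fallback_charset_resolver raises
        # NoCharsetHeaderError and bytes_decode() detects the encoding.
        print(get_response_text(response=response, content=content))


asyncio.run(main())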