Skip to content

Commit 74a0600

Browse files
author
Gabriella Martin
committed
test(docs): ✅ update test to use httpx instead of grequests
1 parent 388334e commit 74a0600

File tree

1 file changed

+36
-32
lines changed

1 file changed

+36
-32
lines changed

tests/test_docs.py

Lines changed: 36 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,16 @@
1-
from bs4 import BeautifulSoup
2-
import grequests
3-
import markdown
1+
import asyncio
42
import os
53
import unittest
4+
5+
import httpx
6+
import markdown
7+
from bs4 import BeautifulSoup
8+
69
from . import (
10+
DOC_NAMES,
11+
DOCS_DIR,
712
SCHEMA_NAMES,
813
all_properties,
9-
DOCS_DIR,
10-
DOC_NAMES,
1114
property_doc_name,
1215
schema_enum_registry,
1316
)
@@ -117,44 +120,45 @@ def error_msg(schema_name, value, enum):
117120
schema_name, v, enum
118121
) # noqa
119122

120-
def test_urls_in_docs(s):
121-
def exception(request, exception):
122-
return f"{request} - {exception}"
123+
def test_urls_in_docs(self):
124+
async def async_requests(urls):
125+
async with httpx.AsyncClient(timeout=60) as client:
126+
responses = (client.get(url) for url in urls)
127+
results = await asyncio.gather(*responses, return_exceptions=True)
123128

124-
def async_requests(urls):
125-
results = grequests.map(
126-
(grequests.get(u) for u in urls), exception_handler=exception, size=100
127-
)
128129
return results
129130

130-
urls = []
131+
urls = ["https://www.google.com/", "https://www.google.com/404", "sdfghjk"]
131132

132-
for docname in DOC_NAMES:
133-
filename = os.path.join(DOCS_DIR, f"{docname}.md")
134-
with open(filename) as f:
135-
doc_html = markdown.markdown(f.read())
136-
soup = BeautifulSoup(doc_html, features="html.parser")
137-
links = soup.find_all("a")
138-
for link in links:
139-
url = link.get("href")
140-
if not url.startswith("http"):
141-
raise ValueError(f"Invalid URL in {docname}: {url}")
133+
# for docname in DOC_NAMES:
134+
# filename = os.path.join(DOCS_DIR, f"{docname}.md")
135+
# with open(filename) as f:
136+
# doc_html = markdown.markdown(f.read())
137+
# soup = BeautifulSoup(doc_html, features="html.parser")
138+
# links = soup.find_all("a")
139+
# for link in links:
140+
# url = link.get("href")
141+
# if not url.startswith("http"):
142+
# raise ValueError(f"Invalid URL in {docname}: {url}")
142143

143-
urls.append(url)
144+
# urls.append(url)
144145

145-
results = async_requests(urls)
146+
results = asyncio.run(async_requests(urls))
146147

147148
warns = []
148149
not_founds = []
149-
for resp in results:
150-
if not resp.ok:
151-
warns.append(f"failed {resp.status_code}: {resp.url}")
152-
if resp.status_code in [404]:
153-
not_founds.append(resp.url)
150+
for response in results:
151+
if isinstance(response, httpx.HTTPError):
152+
warns.append(f"failed {response!s}: {response.request.url!s}")
153+
else:
154+
if not response.is_success:
155+
warns.append(f"failed {response.status_code}: {response.url!s}")
154156

155-
if not_founds:
156-
raise ValueError(f"URLs not found: \n {not_founds}")
157+
if response.status_code in (404,):
158+
not_founds.append(str(response.url))
157159

158160
print("\n=== Minor URL link warnings ===\n")
159161
for w in warns:
160162
print(w)
163+
164+
assert not not_founds, f"URLs not found: \n {not_founds}"

0 commit comments

Comments
 (0)