|
1 | 1 | import pytest
|
| 2 | +from contextlib import nullcontext as does_not_raise |
2 | 3 |
|
3 | 4 | from scraper.add_product import add_product
|
| 5 | +from scraper.exceptions import WebsiteNotSupported |
4 | 6 |
|
# URLs of all websites the scraper supports; each is expected to be added
# without raising. Kept as a flat list so adding a new supported site is a
# one-line change.
_supported_urls = [
    "https://www.amazon.com/",
    "https://www.ebay.com/",
    "https://www.komplett.dk/",
    "https://www.proshop.dk/",
    "https://www.computersalg.dk/",
    "https://www.elgiganten.dk/",
    "https://www.avxperten.dk/",
    "https://www.av-cables.dk/",
    "https://www.power.dk/",
    "https://www.expert.dk/",
    "https://www.mm-vision.dk/",
    "https://www.coolshop.dk/",
    "https://www.sharkgaming.dk/",
    "https://www.newegg.com/",
    "https://www.hifiklubben.dk/",
]

# (url, expectation) pairs for parametrization: supported domains must not
# raise, while an unsupported domain must raise WebsiteNotSupported.
test_domains = [(url, does_not_raise()) for url in _supported_urls] + [
    ("https://www.notsupported.com/", pytest.raises(WebsiteNotSupported)),
]
|
22 | 25 |
|
23 | 26 |
|
# Tests that supported websites can be added to be scraped, and that an
# unsupported website raises WebsiteNotSupported.
@pytest.mark.parametrize("url,expectation", test_domains)
def test_add_product(url, expectation, mocker):
    """Run add_product against *url* and assert the exception expectation.

    All side-effectful collaborators (scraping, persistence, duplicate
    checks) are patched out, so only add_product's own URL/domain handling
    is exercised. ``expectation`` is either a nullcontext (no exception
    expected) or ``pytest.raises(WebsiteNotSupported)``.
    """
    # Stub out network scraping and file/record writes so no I/O happens.
    mocker.patch("scraper.Scraper.scrape_info", return_value=None)
    mocker.patch("scraper.Scraper.save_info", return_value=None)
    mocker.patch("scraper.filemanager.Filemanager.add_product_to_csv", return_value=None)
    # Pretend the product is new so add_product takes the "add" path.
    mocker.patch("scraper.add_product.check_if_product_exists", return_value=False)
    mocker.patch("scraper.add_product.check_if_product_exists_csv", return_value=False)
    mocker.patch("scraper.add_product.add_product_to_records", return_value=None)

    with expectation:
        add_product("test", url)
0 commit comments