Commit 8a0bf87
Test fixes, getting dependencies for pinned requirements in advance.
Signed-off-by: Thomas Neidhart <thomas.neidhart@gmail.com>
netomi committed Nov 30, 2023
1 parent 43aa0c5 commit 8a0bf87
Showing 7 changed files with 63 additions and 28 deletions.
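
In short, the commit makes the dependency metadata of every pinned requirement get fetched from PyPI concurrently before resolution starts (in addition to the version data that was already gathered in advance), so the resolver no longer blocks on one network round trip per package. A minimal sketch of that prefetch pattern, with a hypothetical fetch_metadata coroutine standing in for the provider's real network calls:

import asyncio

# Hypothetical stand-in for a provider call such as
# fill_requirements_for_package(); it only simulates one network round trip.
async def fetch_metadata(name: str) -> str:
    await asyncio.sleep(0.1)
    return f"metadata for {name}"

async def prefetch(names):
    # asyncio.gather awaits all requests concurrently instead of one by one.
    return await asyncio.gather(*[fetch_metadata(n) for n in names])

print(asyncio.run(prefetch(["aiohttp", "aiofiles", "pytest-asyncio"])))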
3 changes: 2 additions & 1 deletion requirements-dev.txt
@@ -38,4 +38,5 @@ tomli==1.2.3
tqdm==4.64.0
twine==3.8.0
typed-ast==1.5.4
webencodings==0.5.1
webencodings==0.5.1
pytest-asyncio==0.21.1
1 change: 1 addition & 0 deletions requirements.txt
@@ -25,3 +25,4 @@ toml==0.10.2
urllib3==2.1.0
zipp==3.17.0
aiohttp==3.9.1
aiofiles==23.2.1
2 changes: 2 additions & 0 deletions setup.cfg
@@ -70,6 +70,7 @@ install_requires =
mock >= 3.0.5
packvers >= 21.5
aiohttp >= 3.9
aiofiles >= 23.1

[options.packages.find]
where = src
@@ -88,6 +89,7 @@ testing =
black
isort
pytest-rerunfailures
pytest-asyncio >= 0.21

docs =
Sphinx>=5.0.2
31 changes: 28 additions & 3 deletions src/python_inspector/api.py
@@ -23,7 +23,7 @@

from _packagedcode.models import DependentPackage
from _packagedcode.models import PackageData
from _packagedcode.pypi import PipRequirementsFileHandler
from _packagedcode.pypi import PipRequirementsFileHandler, get_resolved_purl
from _packagedcode.pypi import PythonSetupPyHandler
from _packagedcode.pypi import can_process_dependent_package
from python_inspector import dependencies
@@ -38,6 +38,7 @@
from python_inspector.resolution import get_python_version_from_env_tag
from python_inspector.resolution import get_reqs_insecurely
from python_inspector.resolution import get_requirements_from_python_manifest
from python_inspector.utils import Candidate
from python_inspector.utils_pypi import PLATFORMS_BY_OS
from python_inspector.utils_pypi import PYPI_SIMPLE_URL
from python_inspector.utils_pypi import Environment
@@ -229,7 +230,7 @@ def resolve_dependencies(
if not direct_dependencies:
return Resolution(
packages=[],
resolution={},
resolution=[],
files=files,
)

@@ -300,7 +301,7 @@ async def get_pypi_data(package):
return data

if verbose:
printer(f"retrieve data from pypi:")
printer(f"retrieve package data from pypi:")

return await asyncio.gather(*[get_pypi_data(package) for package in purls])

@@ -390,6 +391,8 @@ def get_resolved_dependencies(
ignore_errors=ignore_errors,
)

# gather version data for all requirements concurrently in advance.

async def gather_version_data():
async def get_version_data(name: str):
versions = await provider.fill_versions_for_package(name)
@@ -406,6 +409,28 @@ async def get_version_data(name: str):

asyncio.run(gather_version_data())

# gather dependencies for all pinned requirements concurrently in advance.

async def gather_dependencies():
async def get_dependencies(requirement: Requirement):
purl = PackageURL(type="pypi", name=requirement.name)
resolved_purl = get_resolved_purl(purl=purl, specifiers=requirement.specifier)

if resolved_purl:
purl = resolved_purl.purl
candidate = Candidate(requirement.name, purl.version, requirement.extras)
await provider.fill_requirements_for_package(purl, candidate)

if verbose:
printer(f" retrieved dependencies for requirement '{str(purl)}'")

if verbose:
printer(f"dependencies:")

return await asyncio.gather(*[get_dependencies(requirement) for requirement in requirements])

asyncio.run(gather_dependencies())

resolver = Resolver(
provider=provider,
reporter=BaseReporter(),
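The new gather_dependencies() block above only prefetches requirements that resolve to a single concrete purl, which roughly means exact "==" pins that get_resolved_purl can turn into a specific version. A simplified sketch of that selection step, using packaging.requirements rather than the project's own helpers:

from packaging.requirements import Requirement

def pinned_version(req: Requirement):
    # Only an exact "==" pin identifies a single version that can be prefetched.
    pins = [s.version for s in req.specifier if s.operator == "=="]
    return pins[0] if pins else None

print(pinned_version(Requirement("psycopg2==2.7.5")))  # 2.7.5
print(pinned_version(Requirement("requests>=2.0")))    # None
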
3 changes: 2 additions & 1 deletion src/python_inspector/resolution.py
@@ -438,6 +438,7 @@ async def _get_versions_for_package_from_repo(
)
if valid_wheel_present or pypi_valid_python_version:
versions.append(version)

return versions

async def _get_versions_for_package_from_pypi_json_api(self, name: str) -> List[Version]:
@@ -556,7 +557,7 @@ async def _get_requirements_for_package_from_pypi_json_api(
return []
info = resp.get("info") or {}
requires_dist = info.get("requires_dist") or []
return requires_dist
return list(map(lambda r: Requirement(r), requires_dist))

def get_candidates(
self,
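The JSON-API change above wraps each requires_dist string in a Requirement object so this code path yields the same type as the simple-index path. A small illustration with packaging.requirements (the project may use its own vendored Requirement class):

from packaging.requirements import Requirement

requires_dist = ['charset-normalizer<4,>=2; python_version >= "3.7"']
reqs = [Requirement(r) for r in requires_dist]
# Structured access to name, version constraints and environment marker:
print(reqs[0].name, reqs[0].specifier, reqs[0].marker)
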
32 changes: 17 additions & 15 deletions src/python_inspector/utils_pypi.py
@@ -18,13 +18,14 @@
import tempfile
import time
from collections import defaultdict
from typing import List, Dict
from typing import List, Dict, Union, Tuple
from typing import NamedTuple
from urllib.parse import quote_plus
from urllib.parse import unquote
from urllib.parse import urlparse
from urllib.parse import urlunparse

import aiofiles
import aiohttp
import attr
import packageurl
@@ -1593,7 +1594,7 @@ async def fetch_links(
name using the `index_url` of this repository.
"""
package_url = f"{self.index_url}/{normalized_name}"
text = await CACHE.get(
text, _ = await CACHE.get(
path_or_url=package_url,
credentials=self.credentials,
as_text=True,
@@ -1671,7 +1672,7 @@ async def get(
force=False,
verbose=False,
echo_func=None,
):
) -> Tuple[Union[str, bytes], str]:
"""
Return the content fetched from a ``path_or_url`` through the cache.
Raise an Exception on errors. Treats the content as text if as_text is
@@ -1692,13 +1693,13 @@
echo_func=echo_func,
)
wmode = "w" if as_text else "wb"
with open(cached, wmode) as fo:
fo.write(content)
return content
async with aiofiles.open(cached, mode=wmode) as fo:
await fo.write(content)
return content, cached
else:
if TRACE_DEEP:
print(f" FILE CACHE HIT: {path_or_url}")
return get_local_file_content(path=cached, as_text=as_text)
return await get_local_file_content(path=cached, as_text=as_text), cached


CACHE = Cache()
@@ -1730,13 +1731,13 @@ async def get_file_content(
elif path_or_url.startswith("file://") or (
path_or_url.startswith("/") and os.path.exists(path_or_url)
):
return get_local_file_content(path=path_or_url, as_text=as_text)
return await get_local_file_content(path=path_or_url, as_text=as_text)

else:
raise Exception(f"Unsupported URL scheme: {path_or_url}")


def get_local_file_content(path, as_text=True):
async def get_local_file_content(path: str, as_text=True) -> str:
"""
Return the content at `path` as text. Return the content as bytes if
`as_text` is False.
@@ -1745,8 +1746,8 @@ def get_local_file_content(path, as_text=True):
path = path[7:]

mode = "r" if as_text else "rb"
with open(path, mode) as fo:
return fo.read()
async with aiofiles.open(path, mode=mode) as fo:
return await fo.read()


class RemoteNotFetchedException(Exception):
@@ -1828,7 +1829,7 @@ async def fetch_and_save(
errors. Treats the content as text if as_text is True, otherwise as
binary.
"""
content = await CACHE.get(
content, path = await CACHE.get(
path_or_url=path_or_url,
credentials=credentials,
as_text=as_text,
Expand All @@ -1837,7 +1838,8 @@ async def fetch_and_save(
)

output = os.path.join(dest_dir, filename)
wmode = "w" if as_text else "wb"
with open(output, wmode) as fo:
fo.write(content)
if os.path.exists(output):
os.remove(output)

os.symlink(os.path.abspath(path), output)
return content
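
Two behaviors change in utils_pypi.py: Cache.get now returns the cached file path alongside the content, file I/O goes through aiofiles so it no longer blocks the event loop, and fetch_and_save symlinks the cached file into the destination directory instead of writing a second copy. A rough sketch of that combination, under the assumption of a POSIX filesystem where os.symlink is available:

import asyncio
import os
import tempfile

import aiofiles  # third-party dependency added by this commit

async def demo():
    cache_dir = tempfile.mkdtemp()
    dest_dir = tempfile.mkdtemp()
    cached = os.path.join(cache_dir, "page.html")

    # Asynchronous write into the cache, as on a cache miss.
    async with aiofiles.open(cached, mode="w") as fo:
        await fo.write("<html>cached content</html>")

    # Expose the cached file elsewhere via a symlink rather than a copy.
    output = os.path.join(dest_dir, "page.html")
    if os.path.exists(output):
        os.remove(output)
    os.symlink(os.path.abspath(cached), output)

    async with aiofiles.open(output, mode="r") as fo:
        print(await fo.read())

asyncio.run(demo())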
19 changes: 11 additions & 8 deletions tests/test_utils.py
@@ -14,6 +14,7 @@
from netrc import netrc
from unittest import mock

import pytest
from commoncode.testcase import FileDrivenTesting
from test_cli import check_json_file_results

@@ -47,22 +48,23 @@ def test_get_netrc_auth_with_no_matching_url():
assert get_netrc_auth(url="https://pypi2.org/simple", netrc=parsed_netrc) == (None, None)


@pytest.mark.asyncio
@mock.patch("python_inspector.utils_pypi.CACHE.get")
def test_fetch_links(mock_get):
async def test_fetch_links(mock_get):
file_name = test_env.get_test_loc("psycopg2.html")
with open(file_name) as file:
mock_get.return_value = file.read()
links = PypiSimpleRepository().fetch_links(normalized_name="psycopg2")
links = await PypiSimpleRepository().fetch_links(normalized_name="psycopg2")
result_file = test_env.get_temp_file("json")
expected_file = test_env.get_test_loc("psycopg2-links-expected.json", must_exist=False)
with open(result_file, "w") as file:
json.dump(links, file, indent=4)
check_json_file_results(result_file, expected_file)
# Testing relative links
realtive_links_file = test_env.get_test_loc("fetch_links_test.html")
with open(realtive_links_file) as realtive_file:
mock_get.return_value = realtive_file.read()
relative_links = PypiSimpleRepository().fetch_links(normalized_name="sources.whl")
relative_links_file = test_env.get_test_loc("fetch_links_test.html")
with open(relative_links_file) as relative_file:
mock_get.return_value = relative_file.read()
relative_links = await PypiSimpleRepository().fetch_links(normalized_name="sources.whl")
relative_links_result_file = test_env.get_temp_file("json")
relative_links_expected_file = test_env.get_test_loc(
"relative-links-expected.json", must_exist=False
@@ -83,8 +85,9 @@ def test_parse_reqs():
check_json_file_results(result_file, expected_file)


def test_get_sdist_file():
sdist_file = fetch_and_extract_sdist(
@pytest.mark.asyncio
async def test_get_sdist_file():
sdist_file = await fetch_and_extract_sdist(
repos=tuple([PypiSimpleRepository()]),
candidate=Candidate(name="psycopg2", version="2.7.5", extras=None),
python_version="3.8",
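The updated tests are coroutines, so they rely on the pytest-asyncio plugin (added to requirements-dev.txt and the testing extra) to run them in an event loop. A minimal, self-contained example of the marker pattern, run with pytest, using a hypothetical fetch_links coroutine in place of PypiSimpleRepository:

import asyncio

import pytest

async def fetch_links(name: str) -> list:
    # Hypothetical stand-in for PypiSimpleRepository().fetch_links()
    await asyncio.sleep(0)
    return [f"https://pypi.org/simple/{name}/"]

@pytest.mark.asyncio
async def test_fetch_links_returns_urls():
    links = await fetch_links("psycopg2")
    assert links[0].startswith("https://")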
