diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 7e22f6a4d..fb2936f01 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -2,9 +2,9 @@ name: e2e on: push: - branches: [ "main" ] + branches: ["main"] pull_request: - branches: [ "main" ] + branches: ["main"] permissions: contents: read @@ -14,37 +14,43 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Install dependencies - run: | - sudo apt-get install meson scdoc python3-hatchling python3-build python3-installer python3-filelock shellcheck - python3 -m pip install --upgrade pip - pip install uv - - name: Initialize submodules - run: | - git submodule update --init --recursive - - name: Make user install - run: | - ./configure.sh --user-install - make install - - name: Run shellcheck - run: | - shellcheck tests/*.sh - - name: Test steamrt install - run: | - sh tests/test_install.sh - rm -rf "$HOME/.local/share/umu" "$HOME/Games/umu" "$HOME/.local/share/Steam/compatibilitytools.d" - - name: Test steamrt update - run: | - sh tests/test_update.sh - rm -rf "$HOME/.local/share/umu" "$HOME/Games/umu" "$HOME/.local/share/Steam/compatibilitytools.d" - - name: Test winetricks - run: | - sh tests/test_winetricks.sh - rm -rf "$HOME/.local/share/umu" "$HOME/.local/share/Steam/compatibilitytools.d" - - name: Test configuration file - run: | - uv python install 3.11 - uv run --python 3.11 -- sh tests/test_config.sh + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Install dependencies + run: | + sudo apt-get install meson scdoc python3-hatchling python3-build python3-installer python3-filelock shellcheck + python3 -m pip install --upgrade pip + pip install uv + - name: Initialize submodules + run: | + git submodule update --init --recursive + - name: Make user install + run: | + ./configure.sh --user-install + make install + - name: Run shellcheck + run: | + shellcheck tests/*.sh + - name: Setup venv + run: | + uv venv --python 3.11 + - name: Test steamrt install + run: | + source .venv/bin/activate + sh tests/test_install.sh + rm -rf "$HOME/.local/share/umu" "$HOME/Games/umu" "$HOME/.local/share/Steam/compatibilitytools.d" + - name: Test steamrt update + run: | + source .venv/bin/activate + sh tests/test_update.sh + rm -rf "$HOME/.local/share/umu" "$HOME/Games/umu" "$HOME/.local/share/Steam/compatibilitytools.d" + - name: Test winetricks + run: | + source .venv/bin/activate + sh tests/test_winetricks.sh + rm -rf "$HOME/.local/share/umu" "$HOME/.local/share/Steam/compatibilitytools.d" + - name: Test configuration file + run: | + source .venv/bin/activate + sh tests/test_config.sh diff --git a/.github/workflows/static.yml b/.github/workflows/static.yml index 10610e1ba..c95fbc984 100644 --- a/.github/workflows/static.yml +++ b/.github/workflows/static.yml @@ -2,9 +2,9 @@ name: mypy on: push: - branches: [ "main" ] + branches: ["main"] pull_request: - branches: [ "main" ] + branches: ["main"] permissions: contents: read @@ -13,20 +13,25 @@ jobs: build: strategy: matrix: - version: ["3.10"] + version: ["3.11"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.version }} - - name: Install dependencies - run: | - python3 -m pip install --upgrade pip - - name: Check types with mypy - run: | - pip install mypy - mypy . 
+ - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.version }} + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip uv mypy + - name: Setup venv + run: | + uv venv --python 3.11 + source .venv/bin/activate + uv pip install -r requirements.in + - name: Check types with mypy + run: | + source .venv/bin/activate + mypy --python-version 3.11 . diff --git a/.github/workflows/umu-python.yml b/.github/workflows/umu-python.yml index 782c8aed8..a283d464c 100644 --- a/.github/workflows/umu-python.yml +++ b/.github/workflows/umu-python.yml @@ -2,9 +2,9 @@ name: umu-launcher on: push: - branches: [ "main" ] + branches: ["main"] pull_request: - branches: [ "main" ] + branches: ["main"] permissions: contents: read @@ -15,28 +15,40 @@ jobs: matrix: # tomllib requires Python 3.11 # Ubuntu latest (Jammy) provides Python 3.10 - version: ["3.10", "3.11", "3.12", "3.13"] + version: ["3.11", "3.12", "3.13"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.version }} - - name: Install dependencies - run: | - python3 -m pip install --upgrade pip - pip install ruff - pip install python-xlib - pip install filelock - - name: Lint umu_*.py files with Ruff - run: | - pip install ruff - ruff check --output-format github ./umu/umu_*.py - - name: Test with unittest - run: python3 ./umu/umu_test.py - - name: Test with unittest for plugins - if: ${{ matrix.version == '3.11' || matrix.version == '3.12' || matrix.version == '3.13' }} - run: python3 ./umu/umu_test_plugins.py + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.version }} + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + pip install ruff + pip install python-xlib + pip install filelock + pip install uv + - name: Lint umu_*.py files with Ruff + run: | + pip install ruff + ruff check --output-format github ./umu/umu_*.py + - name: Setup venv + run: | + uv venv --python $pyver + source .venv/bin/activate + uv pip install -r requirements.in + env: + pyver: ${{ matrix.version }} + - name: Test with unittest + run: | + source .venv/bin/activate + python3 ./umu/umu_test.py + - name: Test with unittest for plugins + if: ${{ matrix.version == '3.11' || matrix.version == '3.12' || matrix.version == '3.13' }} + run: | + source .venv/bin/activate + python3 ./umu/umu_test_plugins.py diff --git a/Makefile.in b/Makefile.in index 62b22d63f..55a700c82 100644 --- a/Makefile.in +++ b/Makefile.in @@ -21,7 +21,7 @@ FLATPAK ?= xfalse .PHONY: all ifeq ($(FLATPAK), xtrue) -all: umu-dist umu-launcher +all: umu-dist umu-launcher umu-vendored endif .PHONY: install @@ -30,8 +30,8 @@ SOURCE_DATE_EPOCH = $(shell LC_ALL=C date --date='@1580601600') all: zipapp install: zipapp-install else -all: umu-dist umu-docs umu-launcher -install: umu-install umu-launcher-install +all: umu-dist umu-docs umu-launcher umu-vendored +install: umu-install umu-launcher-install umu-vendored-install endif @@ -99,20 +99,17 @@ umu-launcher-dist-install: umu-launcher-install: umu-launcher-dist-install umu-launcher-bin-install -$(OBJDIR)/.build-umu-subprojects: | $(OBJDIR) - $(info :: Building subprojects ) - pip3 install -r requirements.in -t $(OBJDIR) +$(OBJDIR)/.build-umu-vendored: | $(OBJDIR) + $(info :: Building vendored dependencies ) + python3 -m pip install urllib3 -t $(OBJDIR) -.PHONY: umu-subprojects 
-umu-subprojects: $(OBJDIR)/.build-umu-subprojects +.PHONY: umu-vendored +umu-vendored: $(OBJDIR)/.build-umu-vendored -umu-subprojects-install: +umu-vendored-install: umu-vendored $(info :: Installing subprojects ) - install -d $(DESTDIR)$(PYTHONDIR) - cp -r $(OBJDIR)/*-info $(DESTDIR)$(PYTHONDIR) - cp -r $(OBJDIR)/Xlib $(DESTDIR)$(PYTHONDIR) - cp -r $(OBJDIR)/filelock $(DESTDIR)$(PYTHONDIR) - cp $(OBJDIR)/six.py $(DESTDIR)$(PYTHONDIR) + install -d $(DESTDIR)$(PYTHONDIR)/umu/_vendor + cp -r $(OBJDIR)/urllib3 $(DESTDIR)$(PYTHONDIR)/umu/_vendor $(OBJDIR): @mkdir -p $(@) @@ -153,7 +150,7 @@ ZIPAPP_VENV := $(OBJDIR)/zipapp_venv $(OBJDIR)/.build-zipapp: | $(OBJDIR) $(info :: Building umu-launcher as zipapp ) $(PYTHON_INTERPRETER) -m venv $(ZIPAPP_VENV) - . $(ZIPAPP_VENV)/bin/activate && python3 -m pip install -t "$(ZIPAPP_STAGING)" -U --no-compile . + . $(ZIPAPP_VENV)/bin/activate && python3 -m pip install -t "$(ZIPAPP_STAGING)" -U --no-compile . truststore cp umu/__main__.py "$(ZIPAPP_STAGING)" find "$(ZIPAPP_STAGING)" -exec touch -h -d "$(SOURCE_DATE_EPOCH)" {} + . $(ZIPAPP_VENV)/bin/activate && python3 -m zipapp $(ZIPAPP_STAGING) -o $(ZIPAPP) -p "$(PYTHON_INTERPRETER)" -c diff --git a/README.md b/README.md index 566e892bf..15277ab5d 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ Borderlands 3 from EGS store. ## Building -Building umu-launcher currently requires `bash`, `make`, and `scdoc` for distribution, as well as the following Python build tools: [build](https://github.com/pypa/build), [hatchling](https://github.com/pypa/hatch), and [installer](https://github.com/pypa/installer). +Building umu-launcher currently requires `bash`, `make`, and `scdoc` for distribution, as well as the following Python build tools: [build](https://github.com/pypa/build), [hatchling](https://github.com/pypa/hatch), [installer](https://github.com/pypa/installer), and [pip](https://github.com/pypa/pip). To build umu-launcher, after downloading and extracting the source code from this repository, change into the newly extracted directory ```shell @@ -95,7 +95,7 @@ Change the `--prefix` as fit for your distribution, for example `/usr/local`, or Then run `make` to build. 
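For example, a typical sequence looks like the following (illustrative only: the `--prefix` value is an example, and you should consult `configure.sh` itself for the flags it actually accepts):

```shell
./configure.sh --prefix=/usr
make
```
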
After a successful build the resulting files should be available in the `./builddir` directory -## Installing +## Installing To install umu-launcher run the following command after completing the steps described above ```shell diff --git a/packaging/deb/debian/control b/packaging/deb/debian/control index 1fc434798..ce02b0b79 100644 --- a/packaging/deb/debian/control +++ b/packaging/deb/debian/control @@ -13,6 +13,7 @@ Build-Depends: python3-hatchling, python3-installer, python3-build, + python3-pip, Standards-Version: 4.6.2 Homepage: https://github.com/Open-Wine-Components/umu-launcher Vcs-Browser: https://github.com/Open-Wine-Components/umu-launcher diff --git a/packaging/deb/ubuntu/control b/packaging/deb/ubuntu/control index 1fc434798..ce02b0b79 100644 --- a/packaging/deb/ubuntu/control +++ b/packaging/deb/ubuntu/control @@ -13,6 +13,7 @@ Build-Depends: python3-hatchling, python3-installer, python3-build, + python3-pip, Standards-Version: 4.6.2 Homepage: https://github.com/Open-Wine-Components/umu-launcher Vcs-Browser: https://github.com/Open-Wine-Components/umu-launcher diff --git a/pyproject.toml b/pyproject.toml index 33cd16d71..2fc0ca28c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,14 @@ classifiers = [ "Topic :: Software Development :: Libraries :: Python Modules", ] urls = { repository = "https://github.com/Open-Wine-Components/umu-launcher" } -dependencies = ["python-xlib>=0.33", "filelock>=3.9.0"] +# Note: urllib3 is a vendored dependency. When using our Makefile, it will be +# installed automatically. +dependencies = ["python-xlib>=0.33", "filelock>=3.9.0", "urllib3>=2.0.0,<3.0.0"] + +[project.optional-dependencies] +# Recommended +# For network requests, use the system's CA bundle instead of certifi's +cli = ["truststore"] [project.scripts] umu-run = "umu.__main__:main" diff --git a/requirements.in b/requirements.in index 8b4ac5f4c..49ce05aec 100644 --- a/requirements.in +++ b/requirements.in @@ -1,2 +1,3 @@ python-xlib>=0.33 -filelock>=3.15.4 +filelock>=3.15.4 +urllib3>=2.0.0,<3.0.0 diff --git a/umu/__main__.py b/umu/__main__.py index 82d1b50fc..21ea6d791 100644 --- a/umu/__main__.py +++ b/umu/__main__.py @@ -1,5 +1,13 @@ import os import sys +from zipfile import is_zipfile + +if not is_zipfile(os.path.dirname(__file__)): # noqa: PTH120 + sys.path.insert( + 0, + f"{os.path.dirname(os.path.realpath(__file__, strict=True))}/_vendor", # noqa: PTH120 + ) + from argparse import ArgumentParser, Namespace, RawTextHelpFormatter from umu import __version__ diff --git a/umu/umu_proton.py b/umu/umu_proton.py index 743afa711..87cdce916 100644 --- a/umu/umu_proton.py +++ b/umu/umu_proton.py @@ -1,38 +1,36 @@ import os -import sys from concurrent.futures import Future, ThreadPoolExecutor -from hashlib import sha512 -from http import HTTPStatus -from http.client import HTTPException -from json import loads +from hashlib import file_digest, sha512 +from http import HTTPMethod, HTTPStatus from pathlib import Path from re import split as resplit from shutil import move, rmtree -from ssl import SSLContext, create_default_context -from tarfile import open as tar_open from tempfile import TemporaryDirectory from typing import Any -from urllib.error import URLError -from urllib.request import Request, urlopen from filelock import FileLock +from urllib3.exceptions import HTTPError +from urllib3.exceptions import TimeoutError as TimeoutErrorUrllib3 +from urllib3.poolmanager import PoolManager +from urllib3.response import BaseHTTPResponse from umu.umu_consts import STEAM_COMPAT, 
UMU_CACHE, UMU_LOCAL from umu.umu_log import log -from umu.umu_util import run_zenity +from umu.umu_util import extract_tarfile, run_zenity, write_file_chunks -ssl_default_context: SSLContext = create_default_context() +SessionPools = tuple[ThreadPoolExecutor, PoolManager] -try: - from tarfile import tar_filter +# Unique subdir in /tmp +CacheTmpfs = Path - has_data_filter: bool = True -except ImportError: - has_data_filter: bool = False +# Unique subdir in $XDG_CACHE_HOME/umu +CacheSubdir = Path + +SessionCaches = tuple[CacheTmpfs, CacheSubdir] def get_umu_proton( - env: dict[str, str], thread_pool: ThreadPoolExecutor + env: dict[str, str], session_pools: SessionPools ) -> dict[str, str]: """Attempt to use the latest Proton when configured. @@ -54,17 +52,19 @@ def get_umu_proton( try: log.debug("Sending request to 'api.github.com'...") - assets = _fetch_releases() - except URLError: + assets = _fetch_releases(session_pools) + except HTTPError: log.debug("Network is unreachable") - # TODO: Handle interrupts on the move/extract operations with ( TemporaryDirectory() as tmp, TemporaryDirectory(dir=UMU_CACHE) as tmpcache, ): - tmpdirs: tuple[Path, Path] = (Path(tmp), Path(tmpcache)) - if _get_latest(env, STEAM_COMPAT, tmpdirs, assets, thread_pool) is env: + tmpdirs: SessionCaches = (Path(tmp), Path(tmpcache)) + if ( + _get_latest(env, STEAM_COMPAT, tmpdirs, assets, session_pools) + is env + ): return env if _get_from_steamcompat(env, STEAM_COMPAT) is env: return env @@ -74,11 +74,15 @@ def get_umu_proton( return env -def _fetch_releases() -> tuple[tuple[str, str], tuple[str, str]] | tuple[()]: +def _fetch_releases( + session_pools: SessionPools, +) -> tuple[tuple[str, str], tuple[str, str]] | tuple[()]: """Fetch the latest releases from the Github API.""" + resp: BaseHTTPResponse digest_asset: tuple[str, str] proton_asset: tuple[str, str] releases: list[dict[str, Any]] + _, http_pool = session_pools asset_count: int = 0 url: str = "https://api.github.com" repo: str = "/repos/Open-Wine-Components/umu-proton/releases/latest" @@ -91,14 +95,11 @@ def _fetch_releases() -> tuple[tuple[str, str], tuple[str, str]] | tuple[()]: if os.environ.get("PROTONPATH") == "GE-Proton": repo = "/repos/GloriousEggroll/proton-ge-custom/releases/latest" - with urlopen( # noqa: S310 - Request(f"{url}{repo}", headers=headers), # noqa: S310 - context=ssl_default_context, - ) as resp: - if resp.status != HTTPStatus.OK: - return () - releases = loads(resp.read().decode("utf-8")).get("assets", []) + resp = http_pool.request(HTTPMethod.GET, f"{url}{repo}", headers=headers) + if resp.status != HTTPStatus.OK: + return () + releases = resp.json().get("assets", []) for release in releases: if release["name"].endswith("sum"): digest_asset = ( @@ -129,15 +130,20 @@ def _fetch_releases() -> tuple[tuple[str, str], tuple[str, str]] | tuple[()]: def _fetch_proton( env: dict[str, str], - tmp: Path, + session_caches: SessionCaches, assets: tuple[tuple[str, str], tuple[str, str]], + session_pools: SessionPools, ) -> dict[str, str]: """Download the latest UMU-Proton or GE-Proton.""" + resp: BaseHTTPResponse + tmpfs, cache = session_caches + _, http_pool = session_pools proton_hash, proton_hash_url = assets[0] tarball, tar_url = assets[1] proton: str = tarball.removesuffix(".tar.gz") ret: int = 0 # Exit code from zenity digest: str = "" # Digest of the Proton archive + hashsum = sha512() # Verify the scheme from Github for resources if not tar_url.startswith("https:") or not proton_hash_url.startswith( @@ -152,19 +158,19 @@ def 
_fetch_proton( # Since the URLs are not hardcoded links, Ruff will flag the urlopen call # See https://github.com/astral-sh/ruff/issues/7918 log.info("Downloading %s...", proton_hash) - with ( - urlopen(proton_hash_url, context=ssl_default_context) as resp, # noqa: S310 - ): - if resp.status != HTTPStatus.OK: - err: str = ( - f"Unable to download {proton_hash}\n" - f"github.com returned the status: {resp.status}" - ) - raise HTTPException(err) - for line in resp.read().decode("utf-8").splitlines(): - if line.endswith(tarball): - digest = line.split(" ")[0] + resp = http_pool.request(HTTPMethod.GET, proton_hash_url) + if resp.status != HTTPStatus.OK: + err: str = ( + f"Unable to download {proton_hash}\n" + f"{resp.getheader('Host')} returned the status: {resp.status}" + ) + raise HTTPError(err) + + # Parse the Proton digest file + for line in resp.data.decode(encoding="utf-8").splitlines(): + if line.endswith(tarball): + digest = line.split(" ")[0] # Proton # Create a popup with zenity when the env var is set @@ -175,63 +181,79 @@ def _fetch_proton( "--silent", tar_url, "--output-dir", - str(tmp), + str(tmpfs), ] msg: str = f"Downloading {proton}..." ret = run_zenity(curl, opts, msg) if ret: - tmp.joinpath(tarball).unlink(missing_ok=True) + tmpfs.joinpath(tarball).unlink(missing_ok=True) log.warning("zenity exited with the status code: %s", ret) log.info("Retrying from Python...") if not os.environ.get("UMU_ZENITY") or ret: - log.info("Downloading %s...", tarball) - with ( - urlopen(tar_url, context=ssl_default_context) as resp, # noqa: S310 - ): - hashsum = sha512() + parts: Path = tmpfs.joinpath(f"{tarball}.parts") + cached_parts: Path = UMU_CACHE.joinpath(parts.name) + headers: dict[str, str] | None = None + + # Resume from our cached file, if we were interrupted previously + if cached_parts.is_file(): + log.info("Found '%s' in cache, resuming...", cached_parts.name) + headers = {"Range": f"bytes={cached_parts.stat().st_size}-"} + parts = cached_parts + # Rebuild our hashed progress + with parts.open("rb") as fp: + hashsum = file_digest(fp, hashsum.name) + else: + log.info("Downloading %s...", tarball) - # Crash here because without Proton, the launcher will not work - if resp.status != HTTPStatus.OK: - err: str = ( - f"Unable to download {tarball}\n" - f"github.com returned the status: {resp.status}" + resp = http_pool.request( + HTTPMethod.GET, tar_url, preload_content=False, headers=headers + ) + + # Bail out for unexpected status codes + if resp.status not in { + HTTPStatus.OK, + HTTPStatus.PARTIAL_CONTENT, + HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE, + }: + err: str = ( + f"{resp.getheader('Host')} returned the status: {resp.status}" + ) + raise HTTPError(err) + + # Only write our file if we're resuming or downloading first time + if resp.status != HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE: + try: + log.debug("Writing: %s", parts) + hashsum = write_file_chunks(parts, resp, hashsum) + except TimeoutErrorUrllib3: + log.error("Aborting Proton install due to network error") + log.info( + "Moving '%s' to cache for future resumption", parts.name ) - raise HTTPException(err) + log.debug("Moving: %s -> %s", parts, cache.parent) + move(parts, cache.parent) + raise - with tmp.joinpath(tarball).open(mode="ab+", buffering=0) as file: - chunk_size: int = 64 * 1024 # 64 KB - buffer: bytearray = bytearray(chunk_size) - view: memoryview = memoryview(buffer) - while size := resp.readinto(buffer): - file.write(view[:size]) - hashsum.update(view[:size]) + # Release conn to the pool + 
resp.release_conn() - if hashsum.hexdigest() != digest: - err: str = f"Digest mismatched: {tarball}" - raise ValueError(err) + log.debug("Digest: %s", digest) + if hashsum.hexdigest() != digest: + parts.unlink(missing_ok=True) + err: str = ( + f"Digest mismatched: {tarball}\n" + "Possible reason: cached file corrupted or failed to acquire upstream digest\n" + f"Link: {tar_url}" + ) + raise ValueError(err) - log.info("%s: SHA512 is OK", tarball) + log.info("%s: SHA512 is OK", tarball) return env -def _extract_dir(file: Path) -> None: - """Extract from a path to another location.""" - with tar_open(file, "r:gz") as tar: - if has_data_filter: - log.debug("Using filter for archive") - tar.extraction_filter = tar_filter - else: - log.warning("Python: %s", sys.version) - log.warning("Using no data filter for archive") - log.warning("Archive will be extracted insecurely") - log.info("Extracting %s...", file.name) - log.debug("Source: %s", str(file).removesuffix(".tar.gz")) - tar.extractall(path=file.parent) # noqa: S202 - - def _get_from_steamcompat( env: dict[str, str], steam_compat: Path ) -> dict[str, str] | None: @@ -272,9 +294,9 @@ def _get_from_steamcompat( def _get_latest( env: dict[str, str], steam_compat: Path, - tmpdirs: tuple[Path, Path], + session_caches: SessionCaches, assets: tuple[tuple[str, str], tuple[str, str]] | tuple[()], - thread_pool: ThreadPoolExecutor, + session_pools: SessionPools, ) -> dict[str, str] | None: """Download the latest Proton for new installs. @@ -325,14 +347,14 @@ def _get_latest( raise FileExistsError # Download the archive to a temporary directory - _fetch_proton(env, tmpdirs[0], assets) + _fetch_proton(env, session_caches, assets, session_pools) # Extract the archive then move the directory - _install_proton(tarball, tmpdirs, steam_compat, thread_pool) + _install_proton(tarball, session_caches, steam_compat, session_pools) except ( ValueError, KeyboardInterrupt, - HTTPException, + HTTPError, ) as e: log.exception(e) return None @@ -378,9 +400,9 @@ def _update_proton( def _install_proton( tarball: str, - tmpdirs: tuple[Path, Path], + session_caches: SessionCaches, steam_compat: Path, - thread_pool: ThreadPoolExecutor, + session_pools: SessionPools, ) -> None: """Install a Proton directory to Steam's compatibilitytools.d. @@ -391,14 +413,15 @@ def _install_proton( step, where old builds will be removed in parallel. """ future: Future | None = None + tmpfs, cache = session_caches + thread_pool, _ = session_pools + parts: str = f"{tarball}.parts" + cached_parts: Path = cache.parent.joinpath(f"{tarball}.parts") version: str = ( "GE-Proton" if os.environ.get("PROTONPATH") == "GE-Proton" else "UMU-Proton" ) - proton: str = tarball.removesuffix(".tar.gz") - archive_path: str = f"{tmpdirs[0]}/{tarball}" - proton_path: str = f"{tmpdirs[1]}/{proton}" # TODO: Refactor when differential updates are implemented. 
# Remove all previous builds when the build is UMU-Proton @@ -410,19 +433,43 @@ def _install_proton( ] future = thread_pool.submit(_update_proton, protons, thread_pool) - # Move downloaded file from tmpfs to cache to avoid high memory usage - log.debug("Moving: %s -> %s", archive_path, tmpdirs[1]) - move(archive_path, tmpdirs[1]) - - _extract_dir(tmpdirs[1] / tarball) + # Move our file and extract within our cache + if cached_parts.is_file(): + # In this case, arc is already in cache and checksum'd + log.debug( + "Moving: %s -> %s", cached_parts, cached_parts.with_suffix("") + ) + move(cached_parts, cached_parts.with_suffix("")) + # Move the archive to our unique subdir + log.debug("Moving: %s -> %s", cached_parts.with_suffix(""), cache) + move(cached_parts.with_suffix(""), cache) + log.info("Extracting %s...", tarball) + # Extract within the subdir + extract_tarfile( + cache.joinpath(tarball), cache.joinpath(tarball).parent + ) + else: + # The archive is in tmpfs. Remove the parts extension + move(tmpfs.joinpath(parts), tmpfs.joinpath(tarball)) + move(tmpfs.joinpath(tarball), cache) + log.info("Extracting %s...", tarball) + extract_tarfile( + cache.joinpath(tarball), cache.joinpath(tarball).parent + ) # Move decompressed archive to compatibilitytools.d - log.info("'%s' -> '%s'", proton_path, steam_compat) - move(proton_path, steam_compat) + log.info( + "%s -> %s", + cache.joinpath(tarball.removesuffix(".tar.gz")), + steam_compat, + ) + move(cache.joinpath(tarball.removesuffix(".tar.gz")), steam_compat) steam_compat.joinpath("UMU-Latest").unlink(missing_ok=True) - steam_compat.joinpath("UMU-Latest").symlink_to(proton) - log.debug("Linking: UMU-Latest -> %s", proton) + steam_compat.joinpath("UMU-Latest").symlink_to( + tarball.removesuffix(".tar.gz") + ) + log.debug("Linking: UMU-Latest -> %s", tarball.removesuffix(".tar.gz")) if future: future.result() diff --git a/umu/umu_run.py b/umu/umu_run.py index 515e9f1c7..d90c6a862 100755 --- a/umu/umu_run.py +++ b/umu/umu_run.py @@ -17,14 +17,19 @@ except ModuleNotFoundError: from importlib.abc import Traversable + from pathlib import Path from pwd import getpwuid from re import match -from socket import AF_INET, SOCK_DGRAM, gaierror, socket +from socket import AF_INET, SOCK_DGRAM, socket from subprocess import Popen from typing import Any from filelock import FileLock +from urllib3 import PoolManager, Retry +from urllib3.exceptions import MaxRetryError, NewConnectionError +from urllib3.exceptions import TimeoutError as TimeoutErrorUrllib3 +from urllib3.util import Timeout from Xlib import X, Xatom, display from Xlib.error import DisplayConnectionError from Xlib.protocol.request import GetProperty @@ -47,10 +52,15 @@ from umu.umu_util import ( get_libc, get_library_paths, + has_umu_setup, is_installed_verb, xdisplay, ) +NET_TIMEOUT = 5.0 + +NET_RETRIES = 1 + def setup_pfx(path: str) -> None: """Prepare a Proton compatible WINE prefix.""" @@ -84,7 +94,7 @@ def setup_pfx(path: str) -> None: def check_env( - env: dict[str, str], thread_pool: ThreadPoolExecutor + env: dict[str, str], session_pools: tuple[ThreadPoolExecutor, PoolManager] ) -> dict[str, str] | dict[str, Any]: """Before executing a game, check for environment variables and set them. 
@@ -129,11 +139,11 @@ def check_env( # GE-Proton if os.environ.get("PROTONPATH") == "GE-Proton": - get_umu_proton(env, thread_pool) + get_umu_proton(env, session_pools) if "PROTONPATH" not in os.environ: os.environ["PROTONPATH"] = "" - get_umu_proton(env, thread_pool) + get_umu_proton(env, session_pools) env["PROTONPATH"] = os.environ["PROTONPATH"] @@ -768,54 +778,71 @@ def umu_run(args: Namespace | tuple[str, list[str]]) -> int: log.info("umu-launcher version %s (%s)", __version__, sys.version) - with ThreadPoolExecutor() as thread_pool: - try: - # Test the network environment and fail early if the user is trying - # to run umu-run offline because an internet connection is required - # for new setups - log.debug("Connecting to '1.1.1.1'...") - with socket(AF_INET, SOCK_DGRAM) as sock: - sock.settimeout(5) - sock.connect(("1.1.1.1", 53)) - prereq = True - except TimeoutError: # Request to a server timed out - if not UMU_LOCAL.exists() or not any(UMU_LOCAL.iterdir()): - err: str = ( - "umu has not been setup for the user\n" - "An internet connection is required to setup umu" - ) - raise RuntimeError(err) - log.debug("Request timed out") - prereq = True - except OSError as e: # No internet - if e.errno != ENETUNREACH: - raise - if not UMU_LOCAL.exists() or not any(UMU_LOCAL.iterdir()): - err: str = ( - "umu has not been setup for the user\n" - "An internet connection is required to setup umu" - ) - raise RuntimeError(err) - log.debug("Network is unreachable") - prereq = True - - if not prereq: + # Test the network environment and fail early if the user is trying + # to run umu-run offline because an internet connection is required + # for new setups + try: + log.debug("Connecting to '1.1.1.1'...") + with socket(AF_INET, SOCK_DGRAM) as sock: + sock.settimeout(5) + sock.connect(("1.1.1.1", 53)) + prereq = True + except TimeoutError: # Request to a server timed out + if not has_umu_setup(): + err: str = ( + "umu has not been setup for the user\n" + "An internet connection is required to setup umu" + ) + raise RuntimeError(err) + log.debug("Request timed out") + prereq = True + except OSError as e: # No internet + if e.errno != ENETUNREACH: + raise + if not has_umu_setup(): err: str = ( "umu has not been setup for the user\n" "An internet connection is required to setup umu" ) raise RuntimeError(err) + log.debug("Network is unreachable") + prereq = True + if not prereq: + err: str = ( + "umu has not been setup for the user\n" + "An internet connection is required to setup umu" + ) + raise RuntimeError(err) + + # Opt to use the system's native CA bundle rather than certifi's + with suppress(ModuleNotFoundError): + import truststore + + truststore.inject_into_ssl() + + # Default to retrying requests once, while using urllib's defaults + retries: Retry = Retry(total=NET_RETRIES, redirect=True) + # Default to a strict 5 second timeouts throughout + timeout: Timeout = Timeout(connect=NET_TIMEOUT, read=NET_TIMEOUT) + with ( + ThreadPoolExecutor() as thread_pool, + PoolManager(timeout=timeout, retries=retries) as http_pool, + ): + session_pools: tuple[ThreadPoolExecutor, PoolManager] = ( + thread_pool, + http_pool, + ) # Setup the launcher and runtime files future: Future = thread_pool.submit( - setup_umu, root, UMU_LOCAL, __runtime_version__, thread_pool + setup_umu, root, UMU_LOCAL, __runtime_version__, session_pools ) if isinstance(args, Namespace): env, opts = set_env_toml(env, args) else: opts = args[1] # Reference the executable options - check_env(env, thread_pool) + check_env(env, 
session_pools) UMU_LOCAL.mkdir(parents=True, exist_ok=True) @@ -834,16 +861,20 @@ def umu_run(args: Namespace | tuple[str, list[str]]) -> int: try: future.result() - except gaierror as e: - # Network address-related errors in the request to repo.steampowered.com - # At this point, the user's network was reachable on launch, but - # the network suddenly became unreliable so the request failed. - log.exception(e) - except OSError as e: - # Similar situation as above, but the host was resolved yet the - # network suddenly became unreachable in the request to repo.steampowered.com. - if e.errno != ENETUNREACH: - raise + except ( + # Network errors + MaxRetryError, + NewConnectionError, + TimeoutErrorUrllib3, + # Digest mismatched for runtime + ValueError, + ): + if not has_umu_setup(): + err: str = ( + "umu has not been setup for the user\n" + "An internet connection is required to setup umu" + ) + raise RuntimeError(err) log.debug("Network is unreachable") # Exit if the winetricks verb is already installed to avoid reapplying it diff --git a/umu/umu_runtime.py b/umu/umu_runtime.py index f886e2914..3ab3ab3c9 100644 --- a/umu/umu_runtime.py +++ b/umu/umu_runtime.py @@ -1,36 +1,34 @@ import os -import sys from collections.abc import Callable from concurrent.futures import Future, ThreadPoolExecutor -from hashlib import sha256 -from http.client import HTTPException, HTTPResponse, HTTPSConnection +from hashlib import file_digest, sha256 try: from importlib.resources.abc import Traversable except ModuleNotFoundError: from importlib.abc import Traversable -from http import HTTPStatus +from http import HTTPMethod, HTTPStatus from pathlib import Path from secrets import token_urlsafe from shutil import move, rmtree from subprocess import run -from tarfile import open as taropen from tempfile import TemporaryDirectory, mkdtemp from filelock import FileLock +from urllib3.exceptions import HTTPError +from urllib3.exceptions import TimeoutError as TimeoutErrorUrllib3 +from urllib3.poolmanager import PoolManager +from urllib3.response import BaseHTTPResponse from umu.umu_consts import UMU_CACHE, UMU_LOCAL from umu.umu_log import log -from umu.umu_util import https_connection, run_zenity - -try: - from tarfile import tar_filter - - has_data_filter: bool = True -except ImportError: - has_data_filter: bool = False - +from umu.umu_util import ( + extract_tarfile, + has_umu_setup, + run_zenity, + write_file_chunks, +) Codename = str @@ -38,6 +36,8 @@ RuntimeVersion = tuple[Codename, Variant] +SessionPools = tuple[ThreadPoolExecutor, PoolManager] + def create_shim(file_path: Path | None = None): """Create a shell script shim at the specified file path. 
@@ -87,13 +87,12 @@ def create_shim(file_path: Path | None = None): def _install_umu( runtime_ver: RuntimeVersion, - thread_pool: ThreadPoolExecutor, - client_session: HTTPSConnection, + session_pools: SessionPools, ) -> None: - resp: HTTPResponse + resp: BaseHTTPResponse tmp: Path = Path(mkdtemp()) ret: int = 0 # Exit code from zenity - # Codename for the runtime (e.g., 'sniper') + thread_pool, http_pool = session_pools codename, variant = runtime_ver # Archive containing the runtime archive: str = f"SteamLinuxRuntime_{codename}.tar.xz" @@ -102,7 +101,11 @@ def _install_umu( "/snapshots/latest-container-runtime-public-beta" ) token: str = f"?versions={token_urlsafe(16)}" - log.debug("URL: %s", base_url) + host: str = "repo.steampowered.com" + parts: Path = tmp.joinpath(f"{archive}.parts") + log.debug("Using endpoint '%s' for requests", base_url) + + UMU_CACHE.mkdir(parents=True, exist_ok=True) # Download the runtime and optionally create a popup with zenity if os.environ.get("UMU_ZENITY") == "1": @@ -125,112 +128,155 @@ def _install_umu( if not os.environ.get("UMU_ZENITY") or ret: digest: str = "" + buildid: str = "" endpoint: str = ( f"/steamrt-images-{codename}" "/snapshots/latest-container-runtime-public-beta" ) hashsum = sha256() + headers: dict[str, str] | None = None + cached_parts: Path # Get the digest for the runtime archive - client_session.request("GET", f"{endpoint}/SHA256SUMS{token}") + resp = http_pool.request( + HTTPMethod.GET, f"{host}{endpoint}/SHA256SUMS{token}" + ) + if resp.status != HTTPStatus.OK: + err: str = ( + f"{resp.getheader('Host')} returned the status: {resp.status}" + ) + raise HTTPError(err) - with client_session.getresponse() as resp: - if resp.status != HTTPStatus.OK: - err: str = ( - f"repo.steampowered.com returned the status: {resp.status}" - ) - raise HTTPException(err) + # Parse SHA256SUMS + for line in resp.data.decode(encoding="utf-8").splitlines(): + if line.endswith(archive): + digest = line.split(" ")[0] - # Parse SHA256SUMS - for line in resp.read().decode("utf-8").splitlines(): - if line.endswith(archive): - digest = line.split(" ")[0] - break + # Get BUILD_ID.txt. We'll use the value to identify the file when cached. 
+ # This will guarantee we'll be picking up the correct file when resuming + resp = http_pool.request( + HTTPMethod.GET, f"{host}{endpoint}/BUILD_ID.txt{token}" + ) + if resp.status != HTTPStatus.OK: + err: str = ( + f"{resp.getheader('Host')} returned the status: {resp.status}" + ) + raise HTTPError(err) - # Download the runtime - log.info("Downloading %s (latest), please wait...", variant) - client_session.request("GET", f"{endpoint}/{archive}{token}") - - with ( - client_session.getresponse() as resp, - tmp.joinpath(archive).open(mode="ab+", buffering=0) as file, - ): - if resp.status != HTTPStatus.OK: - err: str = ( - f"repo.steampowered.com returned the status: {resp.status}" - ) - raise HTTPException(err) + buildid = resp.data.decode(encoding="utf-8").strip() + log.debug("BUILD_ID: %s", buildid) + + # Extend our variables with the BUILD_ID + log.debug( + "Renaming: %s -> %s", parts, parts.with_suffix(f".{buildid}.parts") + ) + parts = parts.with_suffix(f".{buildid}.parts") + cached_parts = UMU_CACHE.joinpath(f"{archive}.{buildid}.parts") + + # Resume from our cached file, if we were interrupted previously + if cached_parts.is_file(): + log.info("Found '%s' in cache, resuming...", cached_parts.name) + headers = {"Range": f"bytes={cached_parts.stat().st_size}-"} + parts = cached_parts + # Rebuild our hashed progress + with parts.open("rb") as fp: + hashsum = file_digest(fp, hashsum.name) + else: + log.info("Downloading %s (latest), please wait...", variant) + + resp = http_pool.request( + HTTPMethod.GET, + f"{host}{endpoint}/{archive}{token}", + preload_content=False, + headers=headers, + ) - chunk_size: int = 64 * 1024 # 64 KB - buffer: bytearray = bytearray(chunk_size) - view: memoryview = memoryview(buffer) - while size := resp.readinto(buffer): - file.write(view[:size]) - hashsum.update(view[:size]) + # Bail out for unexpected status codes + if resp.status not in { + HTTPStatus.OK, + HTTPStatus.PARTIAL_CONTENT, + HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE, + }: + err: str = ( + f"{resp.getheader('Host')} returned the status: {resp.status}" + ) + raise HTTPError(err) - # Verify the runtime digest - if hashsum.hexdigest() != digest: - err: str = f"Digest mismatched: {archive}" - raise ValueError(err) + # Download the runtime + if resp.status != HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE: + try: + log.debug("Writing: %s", parts) + hashsum = write_file_chunks(parts, resp, hashsum) + except TimeoutErrorUrllib3: + log.error("Aborting steamrt install due to network error") + log.info( + "Moving '%s' to cache for future resumption", parts.name + ) + move(parts, UMU_CACHE) + raise + + # Release conn to the pool + resp.release_conn() + + log.debug("Digest: %s", digest) + if hashsum.hexdigest() != digest: + # Remove our cached file because it had probably got corrupted + # somehow since the last launch. 
Abort the update then continue + # to launch using existing runtime + cached_parts.unlink(missing_ok=True) + err: str = ( + f"Digest mismatched: {archive}\n" + "Possible reason: cached file corrupted or failed to acquire upstream digest\n" + f"Link: {host}{endpoint}/{archive}" + ) + raise ValueError(err) log.info("%s: SHA256 is OK", archive) - # Open the tar file and move the files - log.debug("Opening: %s", tmp.joinpath(archive)) + # Remove the .parts and BUILD_ID suffix + parts = parts.rename( + parts.parent / parts.name.removesuffix(f".{buildid}.parts") + ) - UMU_CACHE.mkdir(parents=True, exist_ok=True) + # Open the tar file and move the files + log.debug("Opening: %s", parts) with TemporaryDirectory(dir=UMU_CACHE) as tmpcache: + futures: list[Future] = [] + var: Path = UMU_LOCAL.joinpath("var") log.debug("Created: %s", tmpcache) - log.debug("Moving: %s -> %s", tmp.joinpath(archive), tmpcache) - move(tmp.joinpath(archive), tmpcache) - - with ( - taropen(f"{tmpcache}/{archive}", "r:xz") as tar, - ): - futures: list[Future] = [] - - if has_data_filter: - log.debug("Using filter for archive") - tar.extraction_filter = tar_filter - else: - log.warning("Python: %s", sys.version) - log.warning("Using no data filter for archive") - log.warning("Archive will be extracted insecurely") - - # Ensure the target directory exists - UMU_LOCAL.mkdir(parents=True, exist_ok=True) - - # Extract the entirety of the archive w/ or w/o the data filter - log.debug( - "Extracting: %s -> %s", f"{tmpcache}/{archive}", tmpcache - ) - tar.extractall(path=tmpcache) # noqa: S202 - - # Move the files to the correct location - source_dir: Path = Path(tmpcache, f"SteamLinuxRuntime_{codename}") - var: Path = UMU_LOCAL.joinpath("var") - log.debug("Source: %s", source_dir) - log.debug("Destination: %s", UMU_LOCAL) - - # Move each file to the dest dir, overwriting if exists - futures.extend( - [ - thread_pool.submit(_move, file, source_dir, UMU_LOCAL) - for file in source_dir.glob("*") - ] - ) + log.debug("Moving: %s -> %s", parts, tmpcache) + move(parts, tmpcache) + + # Ensure the target directory exists + UMU_LOCAL.mkdir(parents=True, exist_ok=True) + log.debug("Extracting: %s -> %s", f"{tmpcache}/{archive}", tmpcache) + extract_tarfile(Path(tmpcache, archive), Path(tmpcache)) + + # Move the files to the correct location + source_dir: Path = Path(tmpcache, f"SteamLinuxRuntime_{codename}") + var: Path = UMU_LOCAL.joinpath("var") + log.debug("Source: %s", source_dir) + log.debug("Destination: %s", UMU_LOCAL) + + # Move each file to the dest dir, overwriting if exists + futures.extend( + [ + thread_pool.submit(_move, file, source_dir, UMU_LOCAL) + for file in source_dir.glob("*") + ] + ) - if var.is_dir(): - log.debug("Removing: %s", var) - # Remove the variable directory to avoid Steam Linux Runtime - # related errors when creating it. Supposedly, it only happens - # when going from umu-launcher 0.1-RC4 -> 1.1.1+ - # See https://github.com/Open-Wine-Components/umu-launcher/issues/213#issue-2576708738 - thread_pool.submit(rmtree, str(var)) + if var.is_dir(): + log.debug("Removing: %s", var) + # Remove the variable directory to avoid Steam Linux Runtime + # related errors when creating it. 
Supposedly, it only happens + # when going from umu-launcher 0.1-RC4 -> 1.1.1+ + # See https://github.com/Open-Wine-Components/umu-launcher/issues/213#issue-2576708738 + thread_pool.submit(rmtree, str(var)) - for future in futures: - future.result() + for future in futures: + future.result() # Rename _v2-entry-point log.debug("Renaming: _v2-entry-point -> umu") @@ -246,25 +292,23 @@ def setup_umu( root: Traversable, local: Path, runtime_ver: RuntimeVersion, - thread_pool: ThreadPoolExecutor, + session_pools: SessionPools, ) -> None: """Install or update the runtime for the current user.""" log.debug("Root: %s", root) log.debug("Local: %s", local) - host: str = "repo.steampowered.com" # New install or umu dir is empty - if not local.exists() or not any(local.iterdir()): + if not has_umu_setup(local): log.debug("New install detected") log.info("Setting up Unified Launcher for Windows Games on Linux...") local.mkdir(parents=True, exist_ok=True) - with https_connection(host) as client_session: - _restore_umu( - runtime_ver, - thread_pool, - lambda: local.joinpath("umu").is_file(), - client_session, - ) + _restore_umu( + local, + runtime_ver, + session_pools, + lambda: local.joinpath("umu").is_file(), + ) log.info("Using %s (latest)", runtime_ver[1]) return @@ -272,15 +316,13 @@ def setup_umu( log.info("%s updates disabled, skipping", runtime_ver[1]) return - with https_connection(host) as client_session: - _update_umu(local, runtime_ver, thread_pool, client_session) + _update_umu(local, runtime_ver, session_pools) def _update_umu( local: Path, runtime_ver: RuntimeVersion, - thread_pool: ThreadPoolExecutor, - client_session: HTTPSConnection, + session_pools: SessionPools, ) -> None: """For existing installations, check for updates to the runtime. @@ -288,20 +330,21 @@ def _update_umu( the local VERSIONS.txt against the remote one. 
""" runtime: Path - resp: HTTPResponse + resp: BaseHTTPResponse + _, http_pool = session_pools codename, variant = runtime_ver endpoint: str = ( f"/steamrt-images-{codename}" "/snapshots/latest-container-runtime-public-beta" ) + # Create a token and append it to the URL to avoid the Cloudflare cache + # Avoids infinite updates to the runtime each launch + # See https://github.com/Open-Wine-Components/umu-launcher/issues/188 token: str = f"?version={token_urlsafe(16)}" + host: str = "repo.steampowered.com" log.debug("Existing install detected") log.debug("Using container runtime '%s' aka '%s'", variant, codename) - log.debug( - "Checking updates for '%s', requesting '%s'...", - variant, - client_session.host, - ) + log.debug("Checking updates for '%s'...", variant) # Find the runtime directory (e.g., sniper_platform_0.20240530.90143) # Assume the directory begins with the variant @@ -310,27 +353,28 @@ def _update_umu( file for file in local.glob(f"{codename}*") if file.is_dir() ) except ValueError: - log.warning("*_platform_* directory missing in '%s'", local) + log.critical("*_platform_* directory missing in '%s'", local) log.info("Restoring Runtime Platform...") _restore_umu( + local, runtime_ver, - thread_pool, + session_pools, lambda: len( [file for file in local.glob(f"{codename}*") if file.is_dir()] ) > 0, - client_session, ) return + # Restore the runtime when pressure-vessel is missing if not local.joinpath("pressure-vessel").is_dir(): - log.warning("pressure-vessel directory missing in '%s'", local) + log.critical("pressure-vessel directory missing in '%s'", local) log.info("Restoring Runtime Platform...") _restore_umu( + local, runtime_ver, - thread_pool, + session_pools, lambda: local.joinpath("pressure-vessel").is_dir(), - client_session, ) return @@ -338,115 +382,36 @@ def _update_umu( # When the file is missing, the request for the image will need to be made # to the endpoint of the specific snapshot if not local.joinpath("VERSIONS.txt").is_file(): - url: str - release: Path = runtime.joinpath("files", "lib", "os-release") - versions: str = f"SteamLinuxRuntime_{codename}.VERSIONS.txt" - - log.warning("VERSIONS.txt file missing in '%s'", local) - - # Restore the runtime if os-release is missing, otherwise pressure - # vessel will crash when creating the variable directory - if not release.is_file(): - log.warning("os-release file missing in *_platform_*") - log.warning("Runtime Platform corrupt") - log.info("Restoring Runtime Platform...") + log.critical("VERSIONS.txt file missing in '%s'", local) + platformid: str | None = _restore_umu_platformid( + runtime, runtime_ver, session_pools + ) + if platformid is None: _restore_umu( + local, runtime_ver, - thread_pool, + session_pools, lambda: local.joinpath("VERSIONS.txt").is_file(), - client_session, ) return + local.joinpath("VERSIONS.txt").write_text(platformid) - # Get the BUILD_ID value in os-release - with release.open(mode="r", encoding="utf-8") as file: - for line in file: - if line.startswith("BUILD_ID"): - # Get the value after 'BUILD_ID=' and strip the quotes - build_id: str = ( - line.removeprefix("BUILD_ID=").rstrip().strip('"') - ) - url = ( - f"/steamrt-images-{codename}" f"/snapshots/{build_id}" - ) - break - - client_session.request("GET", f"{url}{token}") - - with client_session.getresponse() as resp: - # Handle the redirect - if resp.status == HTTPStatus.MOVED_PERMANENTLY: - location: str = resp.getheader("Location", "") - log.debug("Location: %s", resp.getheader("Location")) - # The stdlib requires reading the 
entire response body before - # making another request - resp.read() - - # Make a request to the new location - client_session.request("GET", f"{location}/{versions}{token}") - with client_session.getresponse() as resp_redirect: - if resp_redirect.status != HTTPStatus.OK: - log.warning( - "repo.steampowered.com returned the status: %s", - resp_redirect.status, - ) - return - local.joinpath("VERSIONS.txt").write_text( - resp.read().decode() - ) - - # Update the runtime if necessary by comparing VERSIONS.txt to the remote. - # repo.steampowered currently sits behind a Cloudflare proxy, which may - # respond with cf-cache-status: HIT in the header for subsequent requests - # indicating the response was found in the cache and was returned. Valve - # has control over the CDN's cache control behavior, so we must not assume - # all of the cache will be purged after new files are uploaded. Therefore, - # always avoid the cache by appending a unique query to the URI to avoid - # redownloading the runtime each launch - # See https://github.com/Open-Wine-Components/umu-launcher/issues/188 - url: str = f"{endpoint}/SteamLinuxRuntime_{codename}.VERSIONS.txt{token}" - client_session.request("GET", url) - - # Attempt to compare the digests - with client_session.getresponse() as resp: - if resp.status != HTTPStatus.OK: - log.warning( - "repo.steampowered.com returned the status: %s", resp.status - ) - return + # Fetch the version file + url: str = ( + f"{host}{endpoint}/SteamLinuxRuntime_{codename}.VERSIONS.txt{token}" + ) + log.debug("Sending request to '%s' for 'VERSIONS.txt'...", url) + resp = http_pool.request(HTTPMethod.GET, url) + if resp.status != HTTPStatus.OK: + log.error( + "%s returned the status: %s", resp.getheader("Host"), resp.status + ) + return - steamrt_latest_digest: bytes = sha256(resp.read()).digest() - steamrt_local_digest: bytes = sha256( - local.joinpath("VERSIONS.txt").read_bytes() - ).digest() - steamrt_versions: Path = local.joinpath("VERSIONS.txt") - - log.debug("Source: %s", url) - log.debug("Digest: %s", steamrt_latest_digest) - log.debug("Source: %s", steamrt_versions) - log.debug("Digest: %s", steamrt_local_digest) - - if steamrt_latest_digest != steamrt_local_digest: - lock: FileLock = FileLock(f"{local}/umu.lock") - log.info("Updating %s to latest...", variant) - log.debug("Acquiring file lock '%s'...", lock.lock_file) - - with lock: - log.debug("Acquired file lock '%s'", lock.lock_file) - # Once another process acquires the lock, check if the latest - # runtime has already been downloaded - if ( - steamrt_latest_digest - == sha256(steamrt_versions.read_bytes()).digest() - ): - log.debug("Released file lock '%s'", lock.lock_file) - return - _install_umu(runtime_ver, thread_pool, client_session) - log.debug("Removing: %s", runtime) - rmtree(str(runtime)) - log.debug("Released file lock '%s'", lock.lock_file) + # Update our runtime + _update_umu_platform(local, runtime, runtime_ver, session_pools, resp) - # Restore shim + # Restore shim if missing if not local.joinpath("umu-shim").is_file(): create_shim() @@ -490,8 +455,8 @@ def check_runtime(src: Path, runtime_ver: RuntimeVersion) -> int: file for file in src.glob(f"{codename}*") if file.is_dir() ) except ValueError: - log.warning("%s validation failed", variant) - log.warning("Could not find *_platform_* in '%s'", src) + log.critical("%s validation failed", variant) + log.critical("Could not find *_platform_* in '%s'", src) return ret if not pv_verify.is_file(): @@ -520,16 +485,100 @@ def check_runtime(src: Path, 
runtime_ver: RuntimeVersion) -> int: def _restore_umu( + local: Path, runtime_ver: RuntimeVersion, - thread_pool: ThreadPoolExecutor, + session_pools: SessionPools, callback_fn: Callable[[], bool], - client_session: HTTPSConnection, ) -> None: - with FileLock(f"{UMU_LOCAL}/umu.lock") as lock: + with FileLock(f"{local}/umu.lock") as lock: log.debug("Acquired file lock '%s'...", lock.lock_file) if callback_fn(): log.debug("Released file lock '%s'", lock.lock_file) log.info("%s was restored", runtime_ver[1]) return - _install_umu(runtime_ver, thread_pool, client_session) + _install_umu(runtime_ver, session_pools) log.debug("Released file lock '%s'", lock.lock_file) + + +def _restore_umu_platformid( + runtime_base: Path, + runtime_ver: RuntimeVersion, + session_pools: SessionPools, +) -> None | str: + url: str = "" + _, http_pool = session_pools + codename, _ = runtime_ver + release: Path = runtime_base.joinpath("files", "lib", "os-release") + versions: str = f"SteamLinuxRuntime_{codename}.VERSIONS.txt" + host: str = "repo.steampowered.com" + + # Restore the runtime if os-release is missing, otherwise pressure + # vessel will crash when creating the variable directory + if not release.is_file(): + log.critical("os-release file missing in *_platform_*") + log.critical("Runtime Platform corrupt") + log.info("Restoring Runtime Platform...") + return None + + # Get the BUILD_ID value in os-release so we can get VERSIONS.txt + with release.open(mode="r", encoding="utf-8") as file: + for line in file: + if line.startswith("BUILD_ID"): + # Get the value after 'BUILD_ID=' and strip the quotes + build_id: str = ( + line.removeprefix("BUILD_ID=").rstrip().strip('"') + ) + url = f"/steamrt-images-{codename}" f"/snapshots/{build_id}" + break + + if not url: + log.critical("Failed to parse os-release for BUILD_ID in *_platform_*") + log.critical("Runtime Platform corrupt") + log.info("Restoring Runtime Platform...") + return None + + # Make the request to the VERSIONS.txt endpoint. It's fine to hit the + # cache for this endpoint, as it differs to the latest-beta endpoint + resp = http_pool.request(HTTPMethod.GET, f"{host}{url}{versions}") + if resp.status != HTTPStatus.OK: + log.error( + "%s returned the status: %s", + resp.getheader("Host"), + resp.status, + ) + return None + + # False positive from mypy. 
+ return resp.data.decode(encoding="utf-8") # type: ignore + + +def _update_umu_platform( + local: Path, + runtime: Path, + runtime_ver: RuntimeVersion, + session_pools: SessionPools, + resp: BaseHTTPResponse, +) -> None: + _, variant = runtime_ver + latest: bytes = sha256(resp.data).digest() + current: bytes = sha256( + local.joinpath("VERSIONS.txt").read_bytes() + ).digest() + versions: Path = local.joinpath("VERSIONS.txt") + lock: FileLock = FileLock(f"{local}/umu.lock") + + # Compare our version file to upstream's, updating if different + if latest != current: + log.info("Updating %s to latest...", variant) + log.debug("Acquiring file lock '%s'...", lock.lock_file) + with lock: + log.debug("Acquired file lock '%s'", lock.lock_file) + # Once another process acquires the lock, check if the latest + # runtime has already been downloaded + if latest == sha256(versions.read_bytes()).digest(): + log.debug("Released file lock '%s'", lock.lock_file) + return + _install_umu(runtime_ver, session_pools) + log.debug("Removing: %s", runtime) + rmtree(str(runtime)) + log.debug("Released file lock '%s'", lock.lock_file) diff --git a/umu/umu_test.py b/umu/umu_test.py index a25ffe915..4f751fcde 100644 --- a/umu/umu_test.py +++ b/umu/umu_test.py @@ -1,4 +1,5 @@ import argparse +import hashlib import os import re import sys @@ -11,8 +12,13 @@ from pwd import getpwuid from shutil import copy, copytree, move, rmtree from subprocess import CompletedProcess -from tempfile import TemporaryDirectory, mkdtemp -from unittest.mock import MagicMock, patch +from tempfile import ( + NamedTemporaryFile, + TemporaryDirectory, + TemporaryFile, + mkdtemp, +) +from unittest.mock import MagicMock, Mock, patch from Xlib.display import Display from Xlib.error import DisplayConnectionError @@ -91,6 +97,8 @@ def setUp(self): # Wine prefix self.test_winepfx = Path("./tmp.AlfLPDhDvA") self.test_runtime_version = ("sniper", "steamrt3") + # Thread pool and connection pool instances + self.test_session_pools = (MagicMock(), MagicMock()) # /usr self.test_usr = Path("./tmp.QnZRGFfnqH") @@ -183,6 +191,316 @@ def tearDown(self): if self.test_cache_home.exists(): rmtree(self.test_cache_home.as_posix()) + def test_restore_umu_cb_false(self): + """Test _restore_umu when the callback evaluates to False.""" + mock_cb = Mock(return_value=False) + result = MagicMock() + + with ( + TemporaryDirectory() as file, + patch.object(umu_runtime, "_install_umu"), + ): + mock_local = Path(file) + mock_runtime_ver = ("sniper", "steamrt3") + mock_session_pools = (MagicMock(), MagicMock()) + result = umu_runtime._restore_umu( + mock_local, mock_runtime_ver, mock_session_pools, mock_cb + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + self.assertTrue( + mock_cb.mock_calls, + "Expected callback to be called", + ) + + def test_restore_umu(self): + """Test _restore_umu.""" + mock_cb = Mock(return_value=True) + result = MagicMock() + + with TemporaryDirectory() as file: + mock_local = Path(file) + mock_runtime_ver = ("sniper", "steamrt3") + mock_session_pools = (MagicMock(), MagicMock()) + result = umu_runtime._restore_umu( + mock_local, mock_runtime_ver, mock_session_pools, mock_cb + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + self.assertTrue( + mock_cb.mock_calls, + "Expected callback to be called", + ) + + def test_setup_umu_update(self): + """Test setup_umu when updating the runtime.""" + result = MagicMock() + + # Mock a new install + with TemporaryDirectory() as file1, 
TemporaryDirectory() as file2: + # Populate our fake $XDG_DATA_HOME/umu + Path(file2, "umu").touch() + # Mock the runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + # Mock our thread and conn pool + mock_session_pools = (MagicMock(), MagicMock()) + with patch.object(umu_runtime, "_update_umu"): + result = umu_runtime.setup_umu( + Path(file1), + Path(file2), + mock_runtime_ver, + mock_session_pools, + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_setup_umu_noupdate(self): + """Test setup_umu when setting runtime updates are disabled.""" + result = MagicMock() + os.environ["UMU_RUNTIME_UPDATE"] = "0" + + # Mock a new install + with TemporaryDirectory() as file1, TemporaryDirectory() as file2: + # Populate our fake $XDG_DATA_HOME/umu + Path(file2, "umu").touch() + # Mock the runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + # Mock our thread and conn pool + mock_session_pools = (MagicMock(), MagicMock()) + with patch.object(umu_runtime, "_restore_umu"): + result = umu_runtime.setup_umu( + Path(file1), + Path(file2), + mock_runtime_ver, + mock_session_pools, + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_setup_umu(self): + """Test setup_umu on new install.""" + result = MagicMock() + + # Mock a new install + with TemporaryDirectory() as file1, TemporaryDirectory() as file2: + # Mock the runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + # Mock our thread and conn pool + mock_session_pools = (MagicMock(), MagicMock()) + with patch.object(umu_runtime, "_restore_umu"): + result = umu_runtime.setup_umu( + Path(file1), + Path(file2), + mock_runtime_ver, + mock_session_pools, + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_restore_umu_platformid_status_err(self): + """Test _restore_umu_platformid when the server returns a non-200 status code.""" + result = None + # Mock os-release data + mock_osrel = ( + 'PRETTY_NAME="Steam Runtime 3 (sniper)""\n' + 'NAME="Steam Runtime"\n' + 'VERSION_ID="3"\n' + 'VERSION="3 (sniper)"\n' + "VERSION_CODENAME=sniper\n" + "ID=steamrt\n" + "ID_LIKE=debian\n" + 'HOME_URL="https://store.steampowered.com/"\n' + 'SUPPORT_URL="https://help.steampowered.com/"\n' + 'BUG_REPORT_URL="https://github.com/ValveSoftware/steam-runtime/issues"\n' + 'BUILD_ID="0.20241118.108552"\n' + "VARIANT=Platform\n" + 'VARIANT_ID="com.valvesoftware.steamruntime.platform-amd64_i386-sniper"\n' + ) + # Mock the response + mock_resp = MagicMock() + mock_resp.status = 404 + mock_resp.data = b"foo" + mock_resp.getheader.return_value = "foo" + + # Mock the conn pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + + # Mock thread pool + mock_tp = MagicMock() + + # Mock runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + + with TemporaryDirectory() as file: + mock_runtime_base = Path(file) + mock_osrel_file = mock_runtime_base.joinpath( + "files", "lib", "os-release" + ) + mock_runtime_base.joinpath("files", "lib").mkdir(parents=True) + mock_osrel_file.touch(exist_ok=True) + mock_osrel_file.write_text(mock_osrel) + result = umu_runtime._restore_umu_platformid( + mock_runtime_base, mock_runtime_ver, (mock_tp, mock_hp) + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_restore_umu_platformid_osrel_none(self): + """Test _restore_umu_platformid when the os-release file is missing.""" + result = None + # Mock the response + mock_resp = MagicMock() + mock_resp.status = 200 + 
mock_resp.data = b"foo" + + # Mock the conn pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + + # Mock thread pool + mock_tp = MagicMock() + + # Mock runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + + with TemporaryDirectory() as file: + mock_runtime_base = Path(file) + mock_runtime_base.joinpath("files", "lib").mkdir(parents=True) + result = umu_runtime._restore_umu_platformid( + mock_runtime_base, mock_runtime_ver, (mock_tp, mock_hp) + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_restore_umu_platformid_osrel_err(self): + """Test _restore_umu_platformid on error parsing os-release.""" + result = None + # Mock os-release data. Remove the BUILD_ID field to error + mock_osrel = ( + 'PRETTY_NAME="Steam Runtime 3 (sniper)""\n' + 'NAME="Steam Runtime"\n' + 'VERSION_ID="3"\n' + 'VERSION="3 (sniper)"\n' + "VERSION_CODENAME=sniper\n" + "ID=steamrt\n" + "ID_LIKE=debian\n" + 'HOME_URL="https://store.steampowered.com/"\n' + 'SUPPORT_URL="https://help.steampowered.com/"\n' + 'BUG_REPORT_URL="https://github.com/ValveSoftware/steam-runtime/issues"\n' + "VARIANT=Platform\n" + 'VARIANT_ID="com.valvesoftware.steamruntime.platform-amd64_i386-sniper"\n' + ) + # Mock the response + mock_resp = MagicMock() + mock_resp.status = 200 + mock_resp.data = b"foo" + + # Mock the conn pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + + # Mock thread pool + mock_tp = MagicMock() + + # Mock runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + + with TemporaryDirectory() as file: + mock_runtime_base = Path(file) + mock_runtime_base.joinpath("files", "lib").mkdir(parents=True) + mock_osrel_file = mock_runtime_base.joinpath( + "files", "lib", "os-release" + ) + mock_osrel_file.touch(exist_ok=True) + mock_osrel_file.write_text(mock_osrel) + result = umu_runtime._restore_umu_platformid( + mock_runtime_base, mock_runtime_ver, (mock_tp, mock_hp) + ) + self.assertTrue( + result is None, f"Expected None, received {result}" + ) + + def test_restore_umu_platformid(self): + """Test _restore_umu_platformid.""" + result = None + # Mock os-release data + mock_osrel = ( + 'PRETTY_NAME="Steam Runtime 3 (sniper)""\n' + 'NAME="Steam Runtime"\n' + 'VERSION_ID="3"\n' + 'VERSION="3 (sniper)"\n' + "VERSION_CODENAME=sniper\n" + "ID=steamrt\n" + "ID_LIKE=debian\n" + 'HOME_URL="https://store.steampowered.com/"\n' + 'SUPPORT_URL="https://help.steampowered.com/"\n' + 'BUG_REPORT_URL="https://github.com/ValveSoftware/steam-runtime/issues"\n' + 'BUILD_ID="0.20241118.108552"\n' + "VARIANT=Platform\n" + 'VARIANT_ID="com.valvesoftware.steamruntime.platform-amd64_i386-sniper"\n' + ) + # Mock the response + mock_resp = MagicMock() + mock_resp.status = 200 + mock_resp.data = b"foo" + + # Mock the conn pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + + # Mock thread pool + mock_tp = MagicMock() + + # Mock runtime ver + mock_runtime_ver = ("sniper", "steamrt3") + + with TemporaryDirectory() as file: + mock_runtime_base = Path(file) + mock_osrel_file = mock_runtime_base.joinpath( + "files", "lib", "os-release" + ) + mock_runtime_base.joinpath("files", "lib").mkdir(parents=True) + mock_osrel_file.touch(exist_ok=True) + mock_osrel_file.write_text(mock_osrel) + result = umu_runtime._restore_umu_platformid( + mock_runtime_base, mock_runtime_ver, (mock_tp, mock_hp) + ) + self.assertEqual(result, "foo", f"Expected foo, received {result}") + + def test_write_file_chunks_none(self): + """Test write_file_chunks when not passing a chunk 
size.""" + with NamedTemporaryFile() as file1, TemporaryFile("rb+") as file2: + chunk_size = 8 + mock_file = Path(file1.name) + hasher = hashlib.blake2b() + file2.write(os.getrandom(chunk_size)) + # Pass a buffered reader as our fake http response + umu_util.write_file_chunks(mock_file, file2, hasher) + self.assertTrue( + hasher.digest(), "Expected hashed data > 0, received 0" + ) + + def test_write_file_chunks(self): + """Test write_file_chunks.""" + with NamedTemporaryFile() as file1, TemporaryFile("rb+") as file2: + chunk_size = 8 + mock_file = Path(file1.name) + hasher = hashlib.blake2b() + file2.write(os.getrandom(chunk_size)) + # Pass a buffered reader as our fake http response + umu_util.write_file_chunks(mock_file, file2, hasher, chunk_size) + self.assertTrue( + hasher.digest(), "Expected hashed data > 0, received 0" + ) + def test_get_gamescope_baselayer_appid_err(self): """Test get_gamescope_baselayer_appid on error. @@ -695,27 +1013,29 @@ def test_fetch_releases_no_assets(self): } # Mock the call to urlopen mock_resp = MagicMock() - mock_resp.read.return_value = b"foo" mock_resp.status = 200 - mock_resp.__enter__.return_value = mock_resp + mock_resp.json.return_value = mock_gh_release + + # Mock thread pool + mock_tp = MagicMock() + + # Mock conn pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + # Mock PROTONPATH="", representing a download to UMU-Proton os.environ["PROTONPATH"] = "" - with ( - patch.object(umu_proton, "urlopen", return_value=mock_resp), - patch.object(umu_proton, "loads", return_value=mock_gh_release), - ): - result = umu_proton._fetch_releases() - self.assertTrue( - result is not None, "Expected a value, received None" - ) - self.assertTrue( - isinstance(result, tuple), f"Expected tuple, received {result}" - ) - result_len = len(result) - self.assertFalse( - result_len, - f"Expected tuple with no len, received len {result_len}", - ) + + result = umu_proton._fetch_releases((mock_tp, mock_hp)) + self.assertTrue(result is not None, "Expected a value, received None") + self.assertTrue( + isinstance(result, tuple), f"Expected tuple, received {result}" + ) + result_len = len(result) + self.assertFalse( + result_len, + f"Expected tuple with no len, received len {result_len}", + ) def test_fetch_releases(self): """Test _fetch_releases.""" @@ -732,29 +1052,33 @@ def test_fetch_releases(self): }, ] } - # Mock the call to urlopen + + # Mock the response mock_resp = MagicMock() - mock_resp.read.return_value = b"foo" mock_resp.status = 200 - mock_resp.__enter__.return_value = mock_resp + mock_resp.json.return_value = mock_gh_release + # Mock our thread and http pools + + # Mock the thread pool + mock_tp = MagicMock() + + # Mock the call to http pool + mock_hp = MagicMock() + mock_hp.request.return_value = mock_resp + # Mock PROTONPATH="", representing a download to UMU-Proton os.environ["PROTONPATH"] = "" - with ( - patch.object(umu_proton, "urlopen", return_value=mock_resp), - patch.object(umu_proton, "loads", return_value=mock_gh_release), - ): - result = umu_proton._fetch_releases() - self.assertTrue( - result is not None, "Expected a value, received None" - ) - self.assertTrue( - isinstance(result, tuple), f"Expected tuple, received {result}" - ) - result_len = len(result) - self.assertTrue( - result_len, - f"Expected tuple with len, received len {result_len}", - ) + + result = umu_proton._fetch_releases((mock_tp, mock_hp)) + self.assertTrue(result is not None, "Expected a value, received None") + self.assertTrue( + isinstance(result, tuple), 
f"Expected tuple, received {result}" + ) + result_len = len(result) + self.assertTrue( + result_len, + f"Expected tuple with len, received len {result_len}", + ) def test_update_proton(self): """Test _update_proton.""" @@ -792,7 +1116,7 @@ def test_ge_proton(self): wasn't found in local system. """ test_archive = self.test_archive.rename("GE-Proton9-2.tar.gz") - umu_proton._extract_dir(test_archive) + umu_util.extract_tarfile(test_archive, test_archive.parent) with ( self.assertRaises(FileNotFoundError), @@ -801,12 +1125,11 @@ def test_ge_proton(self): patch.object( umu_proton, "_get_from_steamcompat", return_value=None ), - ThreadPoolExecutor() as thread_pool, ): os.environ["WINEPREFIX"] = self.test_file os.environ["GAMEID"] = self.test_file os.environ["PROTONPATH"] = "GE-Proton" - umu_run.check_env(self.env, thread_pool) + umu_run.check_env(self.env, self.test_session_pools) self.assertEqual( self.env["PROTONPATH"], self.test_compat.joinpath( @@ -859,13 +1182,17 @@ def test_latest_interrupt(self): with ( patch("umu.umu_proton._fetch_proton") as mock_function, - ThreadPoolExecutor() as thread_pool, + ThreadPoolExecutor(), ): # Mock the interrupt # We want the dir we tried to extract to be cleaned mock_function.side_effect = KeyboardInterrupt result = umu_proton._get_latest( - self.env, self.test_compat, tmpdirs, files, thread_pool + self.env, + self.test_compat, + tmpdirs, + files, + self.test_session_pools, ) self.assertFalse( self.env["PROTONPATH"], "Expected PROTONPATH to be empty" @@ -895,12 +1222,16 @@ def test_latest_val_err(self): with ( patch("umu.umu_proton._fetch_proton") as mock_function, - ThreadPoolExecutor() as thread_pool, + ThreadPoolExecutor(), ): # Mock the interrupt mock_function.side_effect = ValueError result = umu_proton._get_latest( - self.env, self.test_compat, tmpdirs, files, thread_pool + self.env, + self.test_compat, + tmpdirs, + files, + self.test_session_pools, ) self.assertFalse( self.env["PROTONPATH"], "Expected PROTONPATH to be empty" @@ -921,10 +1252,14 @@ def test_latest_offline(self): with ( patch("umu.umu_proton._fetch_proton"), - ThreadPoolExecutor() as thread_pool, + ThreadPoolExecutor(), ): result = umu_proton._get_latest( - self.env, self.test_compat, tmpdirs, files, thread_pool + self.env, + self.test_compat, + tmpdirs, + files, + self.test_session_pools, ) self.assertFalse( self.env["PROTONPATH"], "Expected PROTONPATH to be empty" @@ -966,10 +1301,14 @@ def test_link_umu(self): ) with ( patch("umu.umu_proton._fetch_proton"), - ThreadPoolExecutor() as thread_pool, + ThreadPoolExecutor(), ): result = umu_proton._get_latest( - self.env, self.test_compat, tmpdirs, files, thread_pool + self.env, + self.test_compat, + tmpdirs, + files, + self.test_session_pools, ) self.assertTrue(result is self.env, "Expected the same reference") # Verify the latest was set @@ -1010,6 +1349,9 @@ def test_latest_umu(self): with tarfile.open(test_archive.as_posix(), "w:gz") as tar: tar.add(latest.as_posix(), arcname=latest.as_posix()) + # Add the .parts suffix + move(test_archive, self.test_cache.joinpath(f"{latest}.tar.gz.parts")) + # Mock old versions self.test_compat.joinpath("UMU-Proton-9.0-beta15").mkdir() self.test_compat.joinpath("UMU-Proton-9.0-beta14").mkdir() @@ -1027,7 +1369,11 @@ def test_latest_umu(self): ThreadPoolExecutor() as thread_pool, ): result = umu_proton._get_latest( - self.env, self.test_compat, tmpdirs, files, thread_pool + self.env, + self.test_compat, + tmpdirs, + files, + (thread_pool, MagicMock()), ) self.assertTrue(result is self.env, 
"Expected the same reference") # Verify the latest was set @@ -1097,7 +1443,7 @@ def test_steamcompat(self): """ result = None - umu_proton._extract_dir(self.test_archive) + umu_util.extract_tarfile(self.test_archive, self.test_archive.parent) move(str(self.test_archive).removesuffix(".tar.gz"), self.test_compat) result = umu_proton._get_from_steamcompat(self.env, self.test_compat) @@ -1113,12 +1459,15 @@ def test_steamcompat(self): "Expected PROTONPATH to be proton dir in compat", ) - def test_extract_err(self): - """Test _extract_dir when passed a non-gzip compressed archive. + def test_extract_tarfile_err(self): + """Test extract_tarfile when passed a non-gzip compressed archive. A ReadError should be raised as we only expect .tar.gz releases """ - test_archive = self.test_cache.joinpath(f"{self.test_proton_dir}.tar") + test_archive = self.test_cache.joinpath( + f"{self.test_proton_dir}.tar.zst" + ) + # Do not apply compression with tarfile.open(test_archive.as_posix(), "w") as tar: tar.add( @@ -1126,23 +1475,29 @@ def test_extract_err(self): arcname=self.test_proton_dir.as_posix(), ) - with self.assertRaisesRegex(tarfile.ReadError, "gzip"): - umu_proton._extract_dir(test_archive) + with self.assertRaisesRegex(tarfile.CompressionError, "zst"): + umu_util.extract_tarfile(test_archive, test_archive.parent) if test_archive.exists(): test_archive.unlink() - def test_extract(self): - """Test _extract_dir. + def test_extract_tarfile(self): + """Test extract_tarfile. An error should not be raised when the Proton release is extracted to a temporary directory """ result = None - result = umu_proton._extract_dir(self.test_archive) + result = umu_util.extract_tarfile( + self.test_archive, self.test_archive.parent + ) move(str(self.test_archive).removesuffix(".tar.gz"), self.test_compat) - self.assertFalse(result, "Expected None after extracting") + self.assertEqual( + result, + self.test_archive.parent, + f"Expected {self.test_archive.parent}, received: {result}", + ) self.assertTrue( self.test_compat.joinpath(self.test_proton_dir).exists(), "Expected proton dir to exists in compat", @@ -1169,7 +1524,7 @@ def test_game_drive_libpath_empty(self): Path(self.test_file + "/proton").touch() # Replicate main's execution and test up until enable_steam_game_drive - with patch("sys.argv", ["", ""]), ThreadPoolExecutor() as thread_pool: + with patch("sys.argv", ["", ""]): os.environ["WINEPREFIX"] = self.test_file os.environ["PROTONPATH"] = self.test_file os.environ["GAMEID"] = self.test_file @@ -1177,7 +1532,7 @@ def test_game_drive_libpath_empty(self): # Args args = __main__.parse_args() # Config - umu_run.check_env(self.env, thread_pool) + umu_run.check_env(self.env, self.test_session_pools) # Prefix umu_run.setup_pfx(self.env["WINEPREFIX"]) # Env @@ -1441,7 +1796,7 @@ def test_build_command_linux_exe(self): self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + self.test_session_pools, ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), @@ -1528,7 +1883,7 @@ def test_build_command_nopv(self): self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + self.test_session_pools, ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), @@ -1660,7 +2015,7 @@ def test_build_command(self): self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + self.test_session_pools, ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), diff --git a/umu/umu_test_plugins.py 
b/umu/umu_test_plugins.py index 797c22ba7..7b620cec6 100644 --- a/umu/umu_test_plugins.py +++ b/umu/umu_test_plugins.py @@ -7,7 +7,7 @@ from argparse import Namespace from pathlib import Path from shutil import copy, copytree, rmtree -from unittest.mock import patch +from unittest.mock import MagicMock, patch from tomllib import TOMLDecodeError @@ -201,12 +201,11 @@ def test_build_command_entry(self): with ( patch.object(umu_runtime, "_install_umu", return_value=None), ): - # TODO umu_runtime.setup_umu( self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + (MagicMock(), MagicMock()), ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), @@ -284,7 +283,7 @@ def test_build_command_proton(self): self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + (MagicMock(), MagicMock()), ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), @@ -369,7 +368,7 @@ def test_build_command_toml(self): self.test_user_share, self.test_local_share, self.test_runtime_version, - None, + (MagicMock(), MagicMock()), ) copytree( Path(self.test_user_share, "sniper_platform_0.20240125.75305"), diff --git a/umu/umu_util.py b/umu/umu_util.py index fffc7ee20..0bfcc1236 100644 --- a/umu/umu_util.py +++ b/umu/umu_util.py @@ -1,21 +1,22 @@ import os +import sys from contextlib import contextmanager from ctypes.util import find_library from functools import lru_cache -from http.client import HTTPSConnection +from io import BufferedIOBase from pathlib import Path from re import Pattern from re import compile as re_compile from shutil import which -from ssl import SSLContext, create_default_context from subprocess import PIPE, STDOUT, Popen, TimeoutExpired +from tarfile import open as taropen +from urllib3.response import BaseHTTPResponse from Xlib import display +from umu.umu_consts import UMU_LOCAL from umu.umu_log import log -ssl_context: SSLContext | None = None - @lru_cache def get_libc() -> str: @@ -172,26 +173,6 @@ def is_winetricks_verb( return True -@contextmanager -def https_connection(host: str): - """Create an HTTPSConnection.""" - global ssl_context - conn: HTTPSConnection - - if not ssl_context: - ssl_context = create_default_context() - - conn = HTTPSConnection(host, context=ssl_context) - - if os.environ.get("UMU_LOG") in {"1", "debug"}: - conn.set_debuglevel(1) - - try: - yield conn - finally: - conn.close() - - @contextmanager def xdisplay(no: str): """Create a Display.""" @@ -201,3 +182,75 @@ def xdisplay(no: str): yield d finally: d.close() + + +def write_file_chunks( + path: Path, + resp: BufferedIOBase | BaseHTTPResponse, + # Note: hashlib._Hash is internal and an exception will be raised when imported + hasher, # noqa: ANN001 + chunk_size: int = 64 * 1024, +): + """Write a file to path in chunks from a response stream while hashing it. + + Args: + path: file path + resp: urllib3 response streamed response + hasher: hashlib object + chunk_size: max size of data to read from the streamed response + Returns: + hashlib._Hash instance + + """ + buffer: bytearray + view: memoryview + + if not chunk_size: + chunk_size = 64 * 1024 + + buffer = bytearray(chunk_size) + view = memoryview(buffer) + with path.open(mode="ab+", buffering=0) as file: + while size := resp.readinto(buffer): + file.write(view[:size]) + hasher.update(view[:size]) + + return hasher + + +def extract_tarfile(path: Path, dest: Path) -> Path | None: + """Read and securely extract a compressed TAR archive to path. 
+
+    Warns the user if unable to extract the archive securely, falling
+    back to unsafe extraction. The filter used is 'tar_filter'.
+
+    See https://docs.python.org/3/library/tarfile.html#tarfile.tar_filter
+    """
+    if not path.is_file():
+        return None
+
+    # Note: r:tar is a valid mode in cpython.
+    # See https://github.com/python/cpython/blob/b83be9c9718aac42d0d8fc689a829d6594192afa/Lib/tarfile.py#L1871
+    with taropen(path, f"r:{path.suffix.removeprefix('.')}") as tar:  # type: ignore
+        try:
+            from tarfile import tar_filter
+
+            tar.extraction_filter = tar_filter
+            log.debug("Using data filter for archive")
+        except ImportError:
+            # User is on a distro that did not backport extraction filters
+            log.warning("Python: %s", sys.version)
+            log.warning("Using no data filter for archive")
+            log.warning("Archive will be extracted insecurely")
+
+        log.debug("Extracting: %s -> %s", path, dest)
+        tar.extractall(path=dest)  # noqa: S202
+
+    return dest
+
+
+def has_umu_setup(path: Path = UMU_LOCAL) -> bool:
+    """Check if umu has been setup in our runtime directory."""
+    return path.exists() and any(
+        file for file in path.glob("*") if not file.name.endswith("lock")
+    )
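The sketch below shows how the new umu_util helpers introduced in this patch (write_file_chunks and extract_tarfile) might be combined to stream an archive to disk, verify its digest, and unpack it. It is illustrative only and not part of the patch: the URL, file paths, and expected digest are placeholders, and the PoolManager setup stands in for the launcher's own session pools.

# Minimal usage sketch for the new umu_util helpers (assumptions: the URL,
# paths, and digest below are placeholders, not values taken from this patch).
import hashlib
from pathlib import Path

from urllib3 import PoolManager

from umu.umu_util import extract_tarfile, write_file_chunks

http = PoolManager()
dest = Path("/tmp/umu-example")
dest.mkdir(parents=True, exist_ok=True)
archive = dest / "runtime.tar.xz"

# Stream the response body to disk in 64 KiB chunks while hashing it.
# preload_content=False keeps urllib3 from buffering the whole body in memory.
resp = http.request(
    "GET", "https://example.com/runtime.tar.xz", preload_content=False
)
hasher = write_file_chunks(archive, resp, hashlib.sha256())
resp.release_conn()

# Compare against a known digest before unpacking (placeholder value).
expected = "0000000000000000000000000000000000000000000000000000000000000000"
if hasher.hexdigest() == expected:
    # extract_tarfile derives the compression mode from the file suffix
    # ("xz" here) and applies the tar_filter extraction filter when available.
    extract_tarfile(archive, dest)

Streaming through a fixed buffer with readinto keeps memory use flat for large runtime archives, which appears to be the motivation for replacing the old http.client-based https_connection helper with urllib3 response streams.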