diff --git a/.bumpversion.cfg b/.bumpversion.cfg deleted file mode 100644 index cb959a9..0000000 --- a/.bumpversion.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[bumpversion] -current_version = 0.4.12 -tag = True -commit = True - -[bumpversion:file:setup.py] - -[bumpversion:file:simple_httpfs/__init__.py] - diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b036d3b..3de3165 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -1,7 +1,4 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package +name: CI on: push: @@ -10,30 +7,37 @@ on: branches: [ master ] jobs: - build: - + test: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.5, 3.6, 3.7, 3.8] + python-version: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + + - name: Install system dependencies + run: sudo apt-get update && sudo apt-get install -y libfuse-dev + + - name: Install uv + uses: astral-sh/setup-uv@v3 + - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - # - name: Test with pytest - # run: | - # pytest + run: uv sync --extra dev + + - name: Lint with ruff + run: uv run ruff check . + + - name: Check formatting with ruff + run: uv run ruff format --check . + + - name: Type check with mypy + run: uv run mypy simple_httpfs/ + + - name: Test with pytest + run: uv run pytest diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index e9adc97..1415c98 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -1,32 +1,31 @@ -# This workflows will upload a Python Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -name: Upload Python Package +name: Publish Python Package to PyPI on: - push: - tags: - - 'v*' + release: + types: [published] + workflow_dispatch: jobs: - deploy: + publish: + name: Publish to PyPI runs-on: ubuntu-latest + permissions: + id-token: write steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 + - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: - python-version: '3.x' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* + python-version: '3.10' + + - name: Install uv + uses: astral-sh/setup-uv@v3 + + - name: Build package + run: uv build + + - name: Publish distribution 📦 to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.gitignore b/.gitignore index 28aa47a..1e20a71 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,5 @@ __pycache__/ .py[cod] /build /dist +.coverage 
+coverage.xml diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 55c4c7a..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -include versioneer.py -include simple_httpfs/_version.py diff --git a/README.md b/README.md index 681f9a6..2559ea7 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,21 @@ # simple-httpfs -A simple FUSE-based http file system. Read http files as if they were on -the local filesystem. +A simple FUSE-based http/object storage file system. Read remote files as if they were on the local filesystem. ## Usage ``` -simple-http /my/mount/dir -curl /my/mount/dir/http/slashdot.org/country.js.. +simple-httpfs /my/mount/dir +cat /my/mount/dir/http://slashdot.org/country.js... ``` -URLs are referenced relative to the mount directory and suffixed with `..` in -the style of [Daniel Rozenbergs -httpfs](https://github.com/danielrozenberg/httpfs). +Fully qualified URLs are referenced relative to the mount directory and distinguished from directories by appending a shell-safe trailing sentinel string (default: `...`) in +the style of [Daniel Rozenberg's httpfs](https://github.com/danielrozenberg/httpfs). ## Unmounting +Use `umount` or `fusermount -u`. 
+ ``` umount /my/mount/dir ``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..72aa68a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,88 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "simple-httpfs" +version = "0.5.0" +description = "A simple FUSE filesystem for reading http files" +readme = "README.md" +license = {text = "MIT"} +authors = [ + {name = "Peter Kerpedjiev", email = "pkerpedjiev@gmail.com"} +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +requires-python = ">=3.10" +dependencies = [ + "diskcache", + "fusepy", + "obstore", + "obspec", + "typing-extensions; python_version < '3.12'", +] + +[project.optional-dependencies] +dev = [ + "mypy>=1.0", + "pytest>=6.0", + "pytest-cov", + "ruff>=0.12.11", +] + +[project.urls] +Homepage = "https://github.com/higlass/simple-httpfs" +Repository = "https://github.com/higlass/simple-httpfs" + +[project.scripts] +simple-httpfs = "simple_httpfs.__main__:main" + +[tool.ruff] +src = ["simple_httpfs"] +target-version = "py310" + +[tool.ruff.lint] +select = [ + "B", # bugbear + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "RUF", # ruff-specific rules + "UP", # pyupgrade +] + +[tool.ruff.lint.isort] +known-first-party = ["simple_httpfs"] + +[tool.pytest.ini_options] +addopts = [ + "--strict-markers", + "--strict-config", + "--verbose", +] + +[tool.mypy] +python_version = "3.12" +strict = true +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true +disallow_incomplete_defs = true +check_untyped_defs = true +disallow_untyped_decorators = true + +# Third party libraries without type stubs +[[tool.mypy.overrides]] 
+module = [ + "diskcache.*", + "fuse.*", +] +ignore_missing_imports = true \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index f3f10a7..0000000 --- a/setup.cfg +++ /dev/null @@ -1,14 +0,0 @@ -[bumpversion] -current_version = 0.4.1 - -[bdist_wheel] -universal = 1 - -[versioneer] -VCS = git -style = pep440 -versionfile_source = simple_httpfs/_version.py -versionfile_build = -tag_prefix = v -parentdir_prefix = - diff --git a/setup.py b/setup.py deleted file mode 100755 index 17db3f3..0000000 --- a/setup.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -from setuptools import setup, find_packages - -setup( - name="simple-httpfs", - author="Peter Kerpedjiev", - author_email="pkerpedjiev@gmail.com", - packages=["simple_httpfs"], - entry_points={"console_scripts": ["simple-httpfs = simple_httpfs.__main__:main"]}, - url="https://github.com/higlass/simple-httpfs", - description="A simple FUSE filesystem for reading http files", - license="MIT", - long_description=open("README.md").read(), - long_description_content_type="text/markdown", - install_requires=["boto3", "diskcache", "fusepy", "requests", "slugid", "tenacity"], - version="0.4.12", -) diff --git a/simple_httpfs/__init__.py b/simple_httpfs/__init__.py index df9fe47..9797b8e 100644 --- a/simple_httpfs/__init__.py +++ b/simple_httpfs/__init__.py @@ -1,3 +1,9 @@ +from importlib.metadata import version + from .httpfs import HttpFs -__version__ = "0.4.12" +__version__ = version("simple-httpfs") + +__all__ = ["HttpFs", "__version__"] + +del version diff --git a/simple_httpfs/__main__.py b/simple_httpfs/__main__.py index bb713b2..e73a30b 100644 --- a/simple_httpfs/__main__.py +++ b/simple_httpfs/__main__.py @@ -2,15 +2,16 @@ import logging import os.path as op import sys +from typing import Any from fuse import FUSE from .httpfs import HttpFs -def main(): +def main() -> None: parser = argparse.ArgumentParser( - description="""usage: simple-httpfs """ + 
description="""usage: simple-httpfs [OPTIONS] """ ) parser.add_argument("mountpoint") @@ -23,17 +24,15 @@ def main(): help="Run in the foreground", ) - parser.add_argument("--schema", default=None, type=str) + parser.add_argument("--sentinel", default="...") - parser.add_argument("--block-size", default=2 ** 20, type=int) - - parser.add_argument("--disk-cache-size", default=2 ** 30, type=int) - - parser.add_argument("--disk-cache-dir", default="/tmp/xx") + parser.add_argument("--block-size", default=2**20, type=int) parser.add_argument("--lru-capacity", default=400, type=int) - parser.add_argument("--aws-profile", default=None, type=str) + parser.add_argument("--disk-cache-size", default=2**30, type=int) + + parser.add_argument("--disk-cache-dir", default="/tmp/xx") parser.add_argument( "--allow-other", @@ -42,6 +41,14 @@ def main(): help="Allow other users to access this fuse", ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + help="Enable debug logging", + ) + parser.add_argument("-l", "--log", default=None, type=str) args = vars(parser.parse_args()) @@ -53,58 +60,50 @@ def main(): ) sys.exit(1) - logging.basicConfig(level=logging.INFO) logger = logging.getLogger("simple-httpfs") - # logger.setLevel(logging.DEBUG) - if args["log"]: - hdlr = logging.FileHandler(args["log"]) + handler = logging.FileHandler(args["log"]) formatter = logging.Formatter( "%(asctime)s %(levelname)s %(module)s: %(message)s" ) - hdlr.setFormatter(formatter) - logger.addHandler(hdlr) - - if args["schema"] is None: - schema = op.split(args["mountpoint"].strip("/"))[-1] + handler.setFormatter(formatter) + logger.addHandler(handler) else: - schema = args["schema"] + logging.basicConfig(level=logging.INFO) - if schema not in ["http", "https", "ftp", "s3"]: - print( - "Could not infer schema. 
Try specifying either http, " - "https or ftp using the --schema argument", - file=sys.stderr, - ) - sys.exit(1) + if args["verbose"]: + logger.setLevel(logging.DEBUG) + + platform_settings: dict[str, Any] = {} + if sys.platform == "darwin": + platform_settings["noapplexattr"] = True + platform_settings["noappledouble"] = True - start_msg = """ + start_msg = f""" Mounting HTTP Filesystem... - schema: {schema} - mountpoint: {mountpoint} - foreground: {foreground} - allow others: {allow_other} -""".format( - schema=schema, - mountpoint=args["mountpoint"], - foreground=args["foreground"], - allow_other=args["allow_other"], - ) + mountpoint: {args["mountpoint"]} + foreground: {args["foreground"]} + allow others: {args["allow_other"]} + direct_io: True +""" print(start_msg, file=sys.stderr) - fuse = FUSE( - HttpFs( - schema, - disk_cache_size=args["disk_cache_size"], - disk_cache_dir=args["disk_cache_dir"], - lru_capacity=args["lru_capacity"], - block_size=args["block_size"], - aws_profile=args["aws_profile"], - logger=logger, - ), + fs = HttpFs( + sentinel=args["sentinel"], + disk_cache_size=args["disk_cache_size"], + disk_cache_dir=args["disk_cache_dir"], + lru_capacity=args["lru_capacity"], + block_size=args["block_size"], + logger=logger, + ) + + _ = FUSE( + fs, args["mountpoint"], foreground=args["foreground"], allow_other=args["allow_other"], + direct_io=True, + **platform_settings, ) diff --git a/simple_httpfs/_caching.py b/simple_httpfs/_caching.py new file mode 100644 index 0000000..783f5fb --- /dev/null +++ b/simple_httpfs/_caching.py @@ -0,0 +1,223 @@ +from __future__ import annotations + +import posixpath as pp +import threading +from collections import OrderedDict +from collections.abc import Iterator, MutableMapping, Sequence +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar +from weakref import WeakValueDictionary + +from diskcache import Cache as DiskCache +from obspec import GetRange, Head, ListResult, 
ListWithDelimiter, ObjectMeta + +if TYPE_CHECKING: + try: + from collections.abc import Buffer + except ImportError: + from typing_extensions import Buffer + + from obstore import Bytes + + +class Store(GetRange, Head, ListWithDelimiter, Protocol): ... + + +K = TypeVar("K") +V = TypeVar("V") + + +@dataclass +class CacheMonitor: + total_requests: int = 0 + total_blocks: int = 0 + lru_hits: int = 0 + lru_misses: int = 0 + disk_hits: int = 0 + disk_misses: int = 0 + + @property + def lru_hit_rate(self) -> float: + total = self.lru_hits + self.lru_misses + return self.lru_hits / total if total > 0 else 0.0 + + @property + def disk_hit_rate(self) -> float: + total = self.disk_hits + self.disk_misses + return self.disk_hits / total if total > 0 else 0.0 + + def reset(self) -> None: + self.total_requests = 0 + self.total_blocks = 0 + self.lru_hits = 0 + self.lru_misses = 0 + self.disk_hits = 0 + self.disk_misses = 0 + + +class LRUCache(MutableMapping[K, V], Generic[K, V]): + def __init__(self, capacity: int = 128): + self.capacity = capacity + self.cache: OrderedDict[K, V] = OrderedDict() + + def __getitem__(self, key: K) -> V: + """Like dict.__getitem__, but updates usage if key exists.""" + if key not in self.cache: + raise KeyError(key) + self.cache.move_to_end(key) + return self.cache[key] + + def __setitem__(self, key: K, value: V) -> None: + # If cache hit, move to end before updating + if key in self.cache: + self.cache.move_to_end(key) + self.cache[key] = value + # Evict least recently used item if capacity exceeded + if len(self.cache) > self.capacity: + self.cache.popitem(last=False) + + def __delitem__(self, key: K) -> None: + del self.cache[key] + + def __iter__(self) -> Iterator[K]: + return iter(self.cache) + + def __len__(self) -> int: + return len(self.cache) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}({dict(self.cache)}, capacity={self.capacity})" + ) + + def get(self, key: K, default: Any = None) -> Any: + """Like dict.get, 
but updates usage if key exists.""" + if key in self.cache: + self.cache.move_to_end(key) + return self.cache[key] + return default + + def pop(self, key: K, *args: Any) -> Any: + return self.cache.pop(key, *args) + + def clear(self) -> None: + self.cache.clear() + + +class CachedStore(Store): + def __init__( + self, + store: Store, + *, + base_url: str, + meta_cache: LRUCache[str, ObjectMeta], + mem_cache: LRUCache[str, Buffer], + disk_cache: DiskCache, + block_size: int = 1024 * 1024, + cache_monitor: CacheMonitor | None = None, + ): + self.store = store + scheme, path = base_url.split("://", 1) + self.scheme = scheme + self.base_path = path.lstrip("/") + self.meta_cache = meta_cache + self.mem_cache = mem_cache + self.disk_cache = disk_cache + self.block_size = block_size + self.monitor = cache_monitor or CacheMonitor() + self._block_locks: WeakValueDictionary[str, threading.Lock] = ( + WeakValueDictionary() + ) + self._lock: threading.Lock = threading.Lock() + + def _meta_cache_key(self, path: str) -> str: + full_path = pp.normpath(pp.join(self.base_path, path)).lstrip("/") + return f"{self.scheme}://{full_path}" + + def _block_cache_key(self, path: str, block_num: int) -> str: + full_path = pp.normpath(pp.join(self.base_path, path)).lstrip("/") + return f"{self.scheme}://{full_path}.{self.block_size}.{block_num}" + + def head(self, path: str) -> ObjectMeta: + cache_key = self._meta_cache_key(path) + if cache_key in self.meta_cache: + return self.meta_cache[cache_key] + meta = self.store.head(path) + self.meta_cache[cache_key] = meta + return meta + + def get_range( + self, + path: str, + *, + start: int, + end: int | None = None, + length: int | None = None, + ) -> Bytes | bytes: + pos = start + if length is not None: + end = start + length + elif end is None: + raise ValueError("Either end or length must be provided") + + output = b"" + while pos < end: + block_num = pos // self.block_size + remaining = end - pos + data_start = pos % self.block_size + 
data_size = min(self.block_size - data_start, remaining) + + # Read the block from cache or from the obstore and write to cache. + # This is thread-safe. + block = self._get_block(path, block_num) + if not block: + break + + if not hasattr(block, "__getitem__"): + block = bytes(block) + + # Extract only the portion we need from this block + output += block[data_start : data_start + data_size : 1] # type: ignore[index] + pos += data_size + + self.monitor.total_requests += 1 + return output + + def _get_lock(self, cache_key: str) -> threading.Lock: + with self._lock: + if cache_key not in self._block_locks: + block_lock = threading.Lock() + self._block_locks[cache_key] = block_lock + else: + block_lock = self._block_locks[cache_key] + return block_lock + + def _get_block(self, path: str, block_num: int) -> Buffer: + cache_key = self._block_cache_key(path, block_num) + + self.monitor.total_blocks += 1 + with self._get_lock(cache_key): + if cache_key in self.mem_cache: + self.monitor.lru_hits += 1 + return self.mem_cache[cache_key] + self.monitor.lru_misses += 1 + + if cache_key in self.disk_cache: + block = self.disk_cache[cache_key] + self.monitor.disk_hits += 1 + self.mem_cache[cache_key] = block + return block # type: ignore[no-any-return] + self.monitor.disk_misses += 1 + + block = self.store.get_range( + "", start=block_num * self.block_size, length=self.block_size + ) + self.mem_cache[cache_key] = block + self.disk_cache[cache_key] = block # Writes to disk + + return block + + def list_with_delimiter( + self, prefix: str | None = None + ) -> ListResult[Sequence[ObjectMeta]]: + return self.store.list_with_delimiter(prefix) diff --git a/simple_httpfs/_ftp.py b/simple_httpfs/_ftp.py new file mode 100644 index 0000000..2b6a49b --- /dev/null +++ b/simple_httpfs/_ftp.py @@ -0,0 +1,190 @@ +import posixpath as pp +from collections.abc import Sequence +from datetime import datetime +from ftplib import FTP, error_perm +from urllib.parse import urlparse + +from obspec 
import GetRange, Head, ListResult, ListWithDelimiter, ObjectMeta + + +class FTPStore(GetRange, Head, ListWithDelimiter): + """ + An obspec protocol API for FTP. + + See https://developmentseed.org/obspec. + """ + + def __init__(self, url: str, chunk_size: int = 32 * 1024): + o = urlparse(url) + self.host = o.netloc + self.path = o.path + self.chunk_size = chunk_size + + def _open(self, host: str) -> FTP: + ftp = FTP(host) + ftp.login() + + # Set the transfer mode to binary. + ftp.voidcmd("TYPE I") + + return ftp + + def head(self, path: str) -> ObjectMeta: + fpath = "/".join((self.path, path)).replace("//", "/") + ftp = self._open(self.host) + + try: + size = ftp.size(fpath) or 0 + except error_perm as e: + raise FileNotFoundError from e + + # 213 File Status means a modification time was returned + try: + resp = ftp.sendcmd(f"MDTM {fpath}") + if resp.startswith("213"): + last_modified = datetime.strptime(resp[4:].strip(), "%Y%m%d%H%M%S") + else: + last_modified = datetime.now() + except Exception: + last_modified = datetime.now() + + ftp.close() + + return { + "e_tag": None, + "last_modified": last_modified, + "path": fpath, + "size": size, + "version": None, + } + + def get_range( + self, path: str, start: int, end: int | None = None, length: int | None = None + ) -> bytes: + if length is not None: + end = start + length + elif end is None: + raise ValueError("Must specify either 'end' or 'length'") + + fpath = "/".join((self.path, path)).replace("//", "/") + ftp = self._open(self.host) + if start > (ftp.size(fpath) or 0): + data = b"" + else: + conn = ftp.transfercmd(f"RETR {fpath}", rest=start) + amt = end - start + + # Fetch the data in chunks. + data = b"" + while len(data) < amt: + chunk = conn.recv(self.chunk_size) + if chunk: + data += chunk + else: + break + + # Pad with null bytes if we didn't get enough data, or trim. 
+ if len(data) < amt: + data += b"\x00" * (amt - len(data)) + else: + data = data[:amt] + + ftp.close() + return data + + def list_with_delimiter( + self, prefix: str | None = None + ) -> ListResult[Sequence[ObjectMeta]]: + dir_path = _resolve_search_dir(self.path, prefix) + + ftp = self._open(self.host) + try: + dir_listing = ftp.mlsd(dir_path) + except error_perm: + ftp.close() + return {"common_prefixes": [], "objects": []} + + objects: list[ObjectMeta] = [] + common_prefixes: set[str] = set() + + for name, facts in dir_listing: + if name in [".", ".."]: + continue + + # Include the prefix in the returned path + resolved_name = _resolve_path(name, prefix) + if resolved_name is None: + continue + + # Directories are listed under "common_prefixes" + # Files are listed under "objects" + if facts.get("type", "file") == "dir": + common_prefixes.add(resolved_name + "/") + else: + size = int(facts.get("size", 0)) + modify_time = facts.get("modify") + if modify_time is not None: + last_modified = _modify_time_to_datetime(modify_time) + else: + last_modified = datetime.now() + objects.append( + { + "e_tag": None, + "last_modified": last_modified, + "path": resolved_name, + "size": size, + "version": None, + } + ) + + ftp.close() + + return {"common_prefixes": sorted(list(common_prefixes)), "objects": objects} + + +def _resolve_search_dir(path: str, prefix: str | None) -> str: + """ + Resolve the directory to search given ``prefix``. + + If the prefix ends with '/', assume it's a subdirectory of 'path'. + We will search and return all of its contents. + + If prefix doesn't end with '/', assume it's an object/file prefix. + We will search its parent and filter the results for matches. + """ + if prefix is not None: + # Trim back 'prefix' as needed and prepend the base path. + # If 'prefix' does not end with '/' it will get trimmed back to the + # parent of the last component. 
+ parent_dir = pp.dirname(prefix) + if parent_dir: + path = pp.join(path, parent_dir) + + return pp.normpath(path) + + +def _resolve_path(name: str, prefix: str | None) -> str | None: + """ + Resolve the path of a retrieved name based on the search prefix. + + Returns None if the name does not start with the prefix. + """ + if prefix is not None: + # Trim back 'prefix' as needed and append the file name. + # If 'prefix' does not end with '/' it will get trimmed back to the + # parent of the last component. + parent_dir = pp.dirname(prefix) + if parent_dir: + name = pp.join(parent_dir, name) + + # Check for prefix match + if not name.startswith(prefix): + return None + + return pp.normpath(name).lstrip("/") + + +def _modify_time_to_datetime(modify_time: str) -> datetime: + # MLSD returns format: YYYYMMDDHHMMSS or YYYYMMDDHHMMSS.sss + modify_str = modify_time.split(".")[0] # Remove fractional seconds + return datetime.strptime(modify_str, "%Y%m%d%H%M%S") diff --git a/simple_httpfs/httpfs.py b/simple_httpfs/httpfs.py index ca51b79..9e7ecdc 100644 --- a/simple_httpfs/httpfs.py +++ b/simple_httpfs/httpfs.py @@ -1,435 +1,424 @@ -import collections +from __future__ import annotations + import logging -import os -import os.path as op -import re -import sys -import traceback -from errno import EIO, ENOENT -from ftplib import FTP +import posixpath as pp +from errno import EACCES, EIO, ENOENT from stat import S_IFDIR, S_IFREG -from threading import Timer -from time import sleep, time +from time import time +from typing import TYPE_CHECKING, Any, TypedDict from urllib.parse import urlparse -import boto3 -import diskcache as dc -import numpy as np -import requests -from fuse import FUSE, FuseOSError, LoggingMixIn, Operations -from tenacity import ( - retry, - stop_after_attempt, - wait_exponential, - wait_fixed, - wait_random, -) - -import slugid - -CLEANUP_INTERVAL = 60 -CLEANUP_EXPIRED = 60 - -REPORT_INTERVAL = 60 - -DISK_CACHE_SIZE_ENV = "HTTPFS_DISK_CACHE_SIZE" 
-DISK_CACHE_DIR_ENV = "HTTPFS_DISK_CACHE_DIR" - - -FALSY = {0, "0", False, "false", "False", "FALSE", "off", "OFF"} - - -class LRUCache: - def __init__(self, capacity): - self.capacity = capacity - self.cache = collections.OrderedDict() - - def __getitem__(self, key): - value = self.cache.pop(key) - self.cache[key] = value - return value - - def __setitem__(self, key, value): - try: - self.cache.pop(key) - except KeyError: - if len(self.cache) >= self.capacity: - self.cache.popitem(last=False) - self.cache[key] = value - - def __contains__(self, key): - return key in self.cache - - def __len__(self): - return len(self.cache) - - -class FtpFetcher: - def server_path(self, url): - o = urlparse(url) - - return (o.netloc, o.path) - - def login(self, server): - ftp = FTP(server) - ftp.login() - - try: - # do a retrbinary on a non-existent file - # to set the transfer mode to binary - # use a dummy callback too - ftp.retrbinary(slugid.nice(), lambda x: x + 1) - except: - pass - - return ftp - - def get_size(self, url): - (server, path) = self.server_path(url) - - ftp = self.login(server) - size = ftp.size(path) - ftp.close() - return size - - def get_data(self, url, start, end): - import time - - (server, path) = self.server_path(url) - ftp = self.login(server) - conn = ftp.transfercmd("RETR {}".format(path), rest=start) - - amt = end - start - chunk_size = 1 << 15 - data = [] - while len(data) < amt: - chunk = conn.recv(chunk_size) - if chunk: - data += chunk - else: - break - if len(data) < amt: - data += [0] * (amt - len(data)) - else: - data = data[:amt] - - ftp.close() - t2 = time.time() - return np.array(data, dtype=np.uint8) - - -def is_403(value): - """Return True if the error is a 403 exception""" - return value is not None - - -class HttpFetcher: - SSL_VERIFY = os.environ.get("SSL_VERIFY", True) not in FALSY - - def __init__(self, logger): - self.logger = logger - if not self.SSL_VERIFY: - logger.warning( - "You have set ssl certificates to not be verified. 
" - "This may leave you vulnerable. " - "http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification" +import obspec.exceptions +import obstore +from diskcache import Cache as DiskCache +from fuse import FuseOSError, LoggingMixIn, Operations +from obspec.exceptions import map_exception + +from ._caching import CachedStore, CacheMonitor, LRUCache, Store +from ._ftp import FTPStore + +if TYPE_CHECKING: + try: + from collections.abc import Buffer + except ImportError: + from typing_extensions import Buffer + + from obstore.store import ( + AzureConfig, + AzureCredentialProvider, + ClientConfig, + GCSConfig, + GCSCredentialProvider, + RetryConfig, + S3Config, + S3CredentialProvider, + ) + + +class StoreConfigDict(TypedDict, total=False): + s3: S3Config + gcs: GCSConfig + azure: AzureConfig + + +class CredentialProviderDict(TypedDict, total=False): + s3: S3CredentialProvider + gcs: GCSCredentialProvider + azure: AzureCredentialProvider + + +def path_to_url(path: str, sentinel: str) -> str | None: + if path == "/" or not path.endswith(sentinel): + return None + + # Repeated slashes were already removed + scheme, path = path.split(":/", 1) + scheme = scheme.lstrip("/") + path = ( + path.replace( + f"{sentinel}/", "/" + ).rstrip( # Remove sentinels from any "parent dirs" + sentinel + ) # Remove trailing sentinel + ) + path = pp.normpath(path) + + return f"{scheme}://{path}" + + +def load_store( + url: str, + *, + configs: StoreConfigDict | None = None, + credential_providers: CredentialProviderDict | None = None, + client_options: ClientConfig | None = None, + retry_config: RetryConfig | None = None, +) -> Store: + try: + scheme: str = obstore.parse_scheme(url) + except Exception: + scheme = urlparse(url).scheme + + config: S3Config | GCSConfig | AzureConfig | None + match scheme: + case "ftp": + return FTPStore(url) + case "http" | "https": + client_options = (client_options or {}).copy() + client_options.setdefault("allow_http", True) + return 
obstore.store.HTTPStore( + url, client_options=client_options, retry_config=retry_config ) - - def get_size(self, url): - try: - head = requests.head(url, allow_redirects=True, verify=self.SSL_VERIFY) - return int(head.headers["Content-Length"]) - except: - head = requests.get( + case "s3" | "gcs" | "azure": + if configs and scheme in configs: + config = configs[scheme] + else: + config = {"skip_signature": True} + if credential_providers and scheme in credential_providers: + credential_provider = credential_providers[scheme] + else: + credential_provider = None + return obstore.store.from_url( url, - allow_redirects=True, - verify=self.SSL_VERIFY, - headers={"Range": "bytes=0-1"}, + config=config, # type: ignore + client_options=client_options, + retry_config=retry_config, + credential_provider=credential_provider, # type: ignore ) - crange = head.headers["Content-Range"] - match = re.search(r"/(\d+)$", crange) - if match: - return int(match.group(1)) - - self.logger.error(traceback.format_exc()) - raise FuseOSError(ENOENT) - - @retry(wait=wait_fixed(1) + wait_random(0, 2), stop=stop_after_attempt(2)) - def get_data(self, url, start, end): - headers = {"Range": "bytes={}-{}".format(start, end), "Accept-Encoding": ""} - self.logger.info("getting %s %s %s", url, start, end) - r = requests.get(url, headers=headers) - self.logger.info("got %s", r.status_code) - - r.raise_for_status() - block_data = np.frombuffer(r.content, dtype=np.uint8) - return block_data + case _: + return obstore.store.from_url(url) -class S3Fetcher: - SSL_VERIFY = os.environ.get("SSL_VERIFY", True) not in FALSY - - def __init__(self, aws_profile, logger): - self.logger = logger - self.logger.info("Creating S3Fetcher with aws_profile=%s", aws_profile) - self.session = boto3.Session(profile_name=aws_profile) - self.client = self.session.client("s3") - pass - - def parse_bucket_key(self, url): - url_parts = urlparse(url, allow_fragments=False) - bucket = url_parts.netloc - key = 
url_parts.path.strip("/") - - return bucket, key - - def get_size(self, url): - bucket, key = self.parse_bucket_key(url) - - response = self.client.head_object(Bucket=bucket, Key=key) - size = response["ContentLength"] - return size - - @retry(wait=wait_exponential(multiplier=1, min=4, max=10)) - def get_data(self, url, start, end): - bucket, key = self.parse_bucket_key(url) - obj = boto3.resource("s3").Object(bucket, key) - stream = self.client.get_object( - Bucket=bucket, Key=key, Range="bytes={}-{}".format(start, end) - )["Body"] - contents = stream.read() - block_data = np.frombuffer(contents, dtype=np.uint8) - return block_data - - -class HttpFs(LoggingMixIn, Operations): +class HttpFs(LoggingMixIn, Operations): # type: ignore[misc] """ - A read only http/https/ftp filesystem. - + A read-only http(s)/ftp/object storage filesystem for FUSE. """ def __init__( self, - schema, - disk_cache_size=2 ** 30, - disk_cache_dir="/tmp/xx", - lru_capacity=400, - block_size=2 ** 20, - aws_profile=None, - logger=None, + sentinel: str = "...", + block_size: int = 2**20, + disk_cache_size: int = 2**30, + disk_cache_dir: str = "/tmp/xx", + lru_capacity: int = 400, + store_configs: StoreConfigDict | None = None, + credential_providers: CredentialProviderDict | None = None, + client_options: ClientConfig | None = None, + retry_config: RetryConfig | None = None, + logger: logging.Logger | None = None, ): - self.lru_cache = LRUCache(capacity=lru_capacity) - self.lru_attrs = LRUCache(capacity=lru_capacity) - self.schema = schema - self.logger = logger - self.last_report_time = 0 - self.total_requests = 0 - self.getting = set() - - if not self.logger: + """ + Initialize a filesystem. + + Parameters + ---------- + sentinel : str + The terminal sentinel string to identify paths as URLs. See notes. + block_size : int + The block size to use for reads and writes. + disk_cache_size : int + The size of the disk cache to use. + disk_cache_dir : str + The directory to use for the disk cache. 
+ lru_capacity : int + The capacity of the LRU cache. + store_configs : dict[str, dict] | None + Configuration options for different object store schemes. Each key + is the scheme name, and the value is a typed dict of configuration + options for that scheme. See the obstore documentation for details. + credential_provider : dict[str, Callable] | None + Credential providers for different object store schemes. Each key + is the scheme name, and the value is a callback that returns + credentials. See the obstore documentation for details. + client_options : dict | None + Configuration options for the HTTP client if supported. + retry_config : dict | None + Configuration options for the retry mechanism if supported. + logger : logging.Logger | None + The logger to use for logging. + + Notes + ----- + For a given path, the kernel will traverse its ``/``-based hierarchy + and call ``getattr`` on each component. If any component of the path is + not detected by the filesystem, the entire lookup will fail. Therefore, + for this filesystem to properly recognize a URI, each component of its + path needs to be interpreted as a directory, but the full URI needs to + be interpreted as a file. To accomplish this, the end of a qualified + URI is signalled by the presence of a trailing *sentinel* string. 
+ """ + self.logger: logging.Logger + if not logger: self.logger = logging.getLogger(__name__) - - self.logger.info("Starting with disk_cache_size: %d", disk_cache_size) - - if schema == "http" or schema == "https": - self.fetcher = HttpFetcher(self.logger) - elif schema == "ftp": - self.fetcher = FtpFetcher() - elif schema == "s3": - self.fetcher = S3Fetcher(aws_profile, self.logger) else: - raise ("Unknown schema: {}".format(schema)) - - self.disk_cache = dc.Cache(disk_cache_dir, size_limit=disk_cache_size) - - self.total_blocks = 0 - self.lru_hits = 0 - self.lru_misses = 0 + self.logger = logger + self.logger.info(f"Starting with disk_cache_size: {disk_cache_size}") + + self.sentinel: str = sentinel + self.meta_cache: LRUCache[str, Any] = LRUCache(capacity=lru_capacity) + self.mem_cache: LRUCache[str, Any] = LRUCache(capacity=lru_capacity) + self.disk_cache: DiskCache = DiskCache( + disk_cache_dir, size_limit=disk_cache_size + ) + self.block_size: int = block_size + self.cache_monitor: CacheMonitor = CacheMonitor() + + self.store_configs: StoreConfigDict | None = store_configs or {} + self.credential_providers: CredentialProviderDict | None = ( + credential_providers or {} + ) + self.client_options: ClientConfig | None = client_options + self.retry_config: RetryConfig | None = retry_config + + def _load_cached_store(self, url: str) -> CachedStore: + """ + Load a store from a URL. 
+ """ + if "://" not in url: + raise FuseOSError(ENOENT) - self.disk_hits = 0 - self.disk_misses = 0 - self.block_size = block_size + store = load_store( + url, + configs=self.store_configs, + credential_providers=self.credential_providers, + client_options=self.client_options, + retry_config=self.retry_config, + ) + + return CachedStore( + store, + base_url=url, + meta_cache=self.meta_cache, + mem_cache=self.mem_cache, + disk_cache=self.disk_cache, + block_size=self.block_size, + cache_monitor=self.cache_monitor, + ) + + def getattr(self, path: str, fh: Any = None) -> dict[str, Any]: + """ + Return an attribute dictionary for the given path. + + If the sentinel string is missing, the path is interpreted as an + empty directory. + + Parameters + ---------- + path : str + The URI, unix-normalized as a path. If the sentinel string is + missing, the path is interpreted as an empty directory. + fh : Any + File handle (not used). + + Returns + ------- + dict[str, Any] + The attribute dictionary for the given path. It has keys identical + to the stat C structure of stat(2). + + Notes + ----- + ``st_atime``, ``st_mtime`` and ``st_ctime`` should be unix timestamps + (floats). + """ + url = path_to_url(path, self.sentinel) + if url is None: + return dict(st_mode=(S_IFDIR | 0o555), st_nlink=2) - def getSize(self, url): - try: - return self.fetcher.get_size(url) - except Exception as ex: - self.logger.exception(ex) - raise + store = self._load_cached_store(url) - def getattr(self, path, fh=None): + self.logger.info(f"getattr: HEAD: {url}") try: - if path in self.lru_attrs: - return self.lru_attrs[path] - - if path == "/": - self.lru_attrs[path] = dict(st_mode=(S_IFDIR | 0o555), st_nlink=2) - return self.lru_attrs[path] - - if ( - path[-2:] != ".." 
- and not path.endswith("..-journal") - and not path.endswith("..-wal") - ): + metadata = store.head("") + except Exception as e: + mapped_exc = map_exception(e) + if isinstance(mapped_exc, obspec.exceptions.NotFoundError): return dict(st_mode=(S_IFDIR | 0o555), st_nlink=2) - url = "{}:/{}".format(self.schema, path[:-2]) - - # there's an exception for the -jounral files created by SQLite - if not path.endswith("..-journal") and not path.endswith("..-wal"): - size = self.getSize(url) - else: - size = 0 - - # logging.info("head: {}".format(head.headers)) - # logging.info("status_code: {}".format(head.status_code)) - # print("url:", url, "head.url", head.url) - - if size is not None: - self.lru_attrs[path] = dict( - st_mode=(S_IFREG | 0o644), - st_nlink=1, - st_size=size, - st_ctime=time(), - st_mtime=time(), - st_atime=time(), - ) + self.logger.error(f"getattr: HEAD: {e}") + if isinstance( + mapped_exc, + obspec.exceptions.PermissionDeniedError + | obspec.exceptions.UnauthenticatedError, + ): + raise FuseOSError(EACCES) from e + elif isinstance( + mapped_exc, + obspec.exceptions.InvalidPathError + | obspec.exceptions.NotSupportedError, + ): + raise FuseOSError(ENOENT) from e else: - self.lru_attrs[path] = dict(st_mode=(S_IFDIR | 0o555), st_nlink=2) - - return self.lru_attrs[path] - except Exception as ex: - self.logger.exception(ex) - raise - - def unlink(self, path): - return 0 - - def create(self, path, mode, fi=None): - return 0 + raise FuseOSError(EIO) from e + + # Cached directories marked as None + if metadata is None: + return dict(st_mode=(S_IFDIR | 0o555), st_nlink=2) + + now = time() + timestamp = ( + metadata["last_modified"].timestamp() + if "last_modified" in metadata + else now + ) + return dict( + st_mode=(S_IFREG | 0o644), + st_nlink=1, + st_size=metadata["size"], + st_ctime=timestamp, + st_mtime=timestamp, + st_atime=now, + ) + + def readdir(self, path: str, fh: Any = None) -> list[str]: + """ + Return a list of files in the directory. 
+ + Parameters + ---------- + path : str + The URI, unix-normalized as a path. If the sentinel string is + missing, the path is interpreted as an empty directory. + fh : Any + File handle (not used). + + Returns + ------- + list[str] + A list of contents of the "directory", including sentinel strings. + """ + url = path_to_url(path, self.sentinel) + if url is None: + return [".", ".."] - def write(self, path, buf, size, offset, fip): - return 0 + store = self._load_cached_store(url) - def read(self, path, size, offset, fh): - t1 = time() - - self.logger.debug("read %s %s %s", path, offset, size) - - if t1 - self.last_report_time > REPORT_INTERVAL: - """ - self.logger.info( - "lru hits: {} lru misses: {} disk hits: {} total_requests: {}".format( - self.lru_hits, - self.lru_misses, - self.disk_hits, - self.disk_misses, - self.total_requests, - ) - ) - """ - pass + self.logger.info(f"readdir: LIST_WITH_DELIMITER: {url}") try: - self.total_requests += 1 - - attr = self.getattr(path) - url = "{}:/{}".format(self.schema, path[:-2]) - - self.logger.debug("read url: {}".format(url)) - self.logger.debug( - "offset: {} - {} request_size (KB): {:.2f} block: {}".format( - offset, - offset + size - 1, - size / 2 ** 10, - offset // self.block_size, - ) - ) - output = np.zeros((size,), np.uint8) - - t1 = time() - - # nothing fetched yet - last_fetched = -1 - curr_start = offset - - while last_fetched < offset + size: - block_num = curr_start // self.block_size - block_start = self.block_size * (curr_start // self.block_size) + result = store.list_with_delimiter() + except Exception as e: + self.logger.error(f"readdir: LIST_WITH_DELIMITER: {e}") + mapped_exc = map_exception(e) + if isinstance(mapped_exc, obspec.exceptions.NotFoundError): + return [".", ".."] # Return empty directory for not found + elif isinstance( + mapped_exc, + obspec.exceptions.PermissionDeniedError + | obspec.exceptions.UnauthenticatedError, + ): + raise FuseOSError(EACCES) from e + else: + raise 
FuseOSError(EIO) from e - block_id = (url, block_num) - while block_id in self.getting: - sleep(0.05) + dirs = [(item + self.sentinel) for item in result["common_prefixes"]] + # Mark directories as seen by caching None + for dir in result["common_prefixes"]: + cache_key = store._meta_cache_key(dir) + if cache_key not in self.meta_cache: + self.meta_cache[cache_key] = None - self.getting.add(block_id) - block_data = self.get_block(url, block_num) - self.getting.remove(block_id) + files = [(item["path"] + self.sentinel) for item in result["objects"]] + # Cache file metadata for all objects seen in listing + for item in result["objects"]: + cache_key = store._meta_cache_key(item["path"]) + if cache_key not in self.meta_cache: + self.meta_cache[cache_key] = item - data_start = ( - curr_start - (curr_start // self.block_size) * self.block_size - ) + return [".", "..", *dirs, *files] - data_end = min(self.block_size, offset + size - block_start) - data = block_data[data_start:data_end] + def read(self, path: str, size: int, offset: int, fh: Any = None) -> Buffer | bytes: + """ + Return a byte string containing the data requested. + + Parameters + ---------- + path : str + The URI, unix-normalized as a path. If the sentinel string is + missing, the path is interpreted as an empty directory. + size : int + The number of bytes to read. + offset : int + The offset to start reading from. + fh : Any + File handle (not used). + + Returns + ------- + Buffer | bytes + The requested data. 
+ """ + url = path_to_url(path, self.sentinel) + if url is None: + return b"" - d_start = curr_start - offset - output[d_start : d_start + len(data)] = data + store = self._load_cached_store(url) - last_fetched = curr_start + (data_end - data_start) - curr_start += data_end - data_start + self.logger.debug( + f"read: GET_RANGE: {url}\n" + f"range: {offset} - {offset + size - 1}\n" + f"request_size (KB): {size / 1024:.2f}\n" + ) + try: + return store.get_range(url, start=offset, length=size) + except Exception as e: + self.logger.error(f"read: GET_RANGE: {e}") + mapped_exc = map_exception(e) + if isinstance(mapped_exc, obspec.exceptions.NotFoundError): + raise FuseOSError(ENOENT) from e + elif isinstance( + mapped_exc, + obspec.exceptions.PermissionDeniedError + | obspec.exceptions.UnauthenticatedError, + ): + raise FuseOSError(EACCES) from e + else: + raise FuseOSError(EIO) from e - bts = bytes(output) + def link(self, target: str, source: str) -> None: ... - return bts + def symlink(self, target: str, source: str) -> None: ... - except Exception as ex: - self.logger.exception(ex) - raise + def unlink(self, path: str) -> None: ... - def destroy(self, path): - self.disk_cache.close() + def write(self, path: str, buf: bytes, size: int, offset: int, fip: Any) -> int: + return 0 - def get_block(self, url, block_num): + def statfs(self, path: str) -> dict[str, int]: """ - Get a data block from a URL. Blocks are 256K bytes in size - - Parameters: - ----------- - url: string - The url of the file we want to retrieve a block from - block_num: int - The # of the 256K'th block of this file + Some fake facts about the filesystem. 
""" - cache_key = "{}.{}.{}".format(url, self.block_size, block_num) - cache = self.disk_cache - - self.total_blocks += 1 - - if cache_key in self.lru_cache: - self.lru_hits += 1 - hit = self.lru_cache[cache_key] - return hit - else: - self.lru_misses += 1 - - if cache_key in self.disk_cache: - self.logger.info("cache hit: %s", cache_key) - try: - block_data = self.disk_cache[cache_key] - self.disk_hits += 1 - self.lru_cache[cache_key] = block_data - return block_data - except KeyError: - pass - - self.disk_misses += 1 - block_start = block_num * self.block_size - - self.logger.info("getting data %s", cache_key) - block_data = self.fetcher.get_data( - url, block_start, block_start + self.block_size - 1 - ) - - self.lru_cache[cache_key] = block_data - self.disk_cache[cache_key] = block_data - - return block_data + fs_block_size = 128 * 1024 + return dict( + f_frsize=fs_block_size, # fundamental block size + f_bsize=fs_block_size, # preferred block size + f_blocks=1024 * 1024, # pretend total capacity (in blocks) + f_bfree=512 * 1024, # half free for root + f_bavail=512 * 1024, # same for non-root + f_namemax=8192, # maximum filename length (in chars) + ) + + def destroy(self, path: str) -> None: + """ + Called on filesystem destruction. Path is always `/`. + """ + self.disk_cache.close() diff --git a/test.sh b/test.sh deleted file mode 100755 index 9a6bc51..0000000 --- a/test.sh +++ /dev/null @@ -1,19 +0,0 @@ -DIR=x11 -umount /tmp/$DIR/https -python simple_httpfs/simple-httpfs.py /tmp/$DIR/https - -cat /tmp/${DIR}/https/s3.amazonaws.com/pkerp/public/tiny.txt.. - -umount $DIR/https -python simple_httpfs/simple-httpfs.py $DIR/https - -cat $DIR/https/s3.amazonaws.com/pkerp/public/tiny.txt.. - -umount /tmp/$DIR/https -umount $DIR/https - -python simple_httpfs/simple-httpfs.py $DIR/http - -head $DIR/http/hgdownload.cse.ucsc.edu/goldenpath/hg19/encodeDCC/wgEncodeSydhTfbs/wgEncodeSydhTfbsGm12878InputStdSig.bigWig.. 
- -umount $DIR/http diff --git a/tests/smoke_tests.sh b/tests/smoke_tests.sh new file mode 100644 index 0000000..11a4275 --- /dev/null +++ b/tests/smoke_tests.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# +# Smoke tests for simple-httpfs FUSE filesystem +# +# These tests require FUSE to be available and should be run manually. +# They are not part of the automated test suite. +# + +set -euxo pipefail + +TARGET1="https://raw.githubusercontent.com/octocat/Hello-World/master/README" +TARGET2="s3://pkerp/public/tiny.txt" +MOUNT_POINT="/tmp/cloud" +EOL="..." + +echo "Starting filesystem..." +mkdir -p "$MOUNT_POINT" +simple-httpfs -f -v "$MOUNT_POINT" --log /dev/null & +sleep 2 + +echo "Testing HTTP..." +http_url="$MOUNT_POINT/https:/raw.githubusercontent.com/octocat/Hello-World/master/README" +ls -la $MOUNT_POINT/$TARGET1$EOL +ls -la $MOUNT_POINT/$TARGET2$EOL +head -c 100 $MOUNT_POINT/$TARGET1$EOL +wc -c $MOUNT_POINT/$TARGET1$EOL + +echo "Testing concurrent operations..." +head -c 100 $MOUNT_POINT/$TARGET1$EOL & +pid1=$! +head -c 100 $MOUNT_POINT/$TARGET2$EOL & +pid2=$! +wait $pid1 $pid2 +echo "Concurrent operations completed successfully"; + +echo "Stopping filesystem..." 
+umount "$MOUNT_POINT" \ No newline at end of file diff --git a/tests/test_caching.py b/tests/test_caching.py new file mode 100644 index 0000000..fe36ac5 --- /dev/null +++ b/tests/test_caching.py @@ -0,0 +1,519 @@ +from collections.abc import Iterator + +import obstore +import pytest +from diskcache import Cache as DiskCache + +from simple_httpfs._caching import CachedStore, CacheMonitor, LRUCache + + +class TestLRUCache: + """Test cases for LRUCache implementation.""" + + def test_init_default_capacity(self): + cache = LRUCache() + assert cache.capacity == 128 + assert len(cache) == 0 + + def test_init_custom_capacity(self): + cache = LRUCache(capacity=64) + assert cache.capacity == 64 + assert len(cache) == 0 + + def test_basic_setitem_getitem(self): + cache = LRUCache(capacity=3) + cache["key1"] = "value1" + cache["key2"] = "value2" + + assert cache["key1"] == "value1" + assert cache["key2"] == "value2" + assert len(cache) == 2 + + def test_getitem_keyerror(self): + cache = LRUCache() + with pytest.raises(KeyError): + _ = cache["nonexistent"] + + def test_delitem(self): + cache = LRUCache() + cache["key"] = "value" + assert "key" in cache + + del cache["key"] + assert "key" not in cache + assert len(cache) == 0 + + def test_delitem_keyerror(self): + cache = LRUCache() + with pytest.raises(KeyError): + del cache["nonexistent"] + + def test_update_existing_key(self): + cache = LRUCache(capacity=3) + cache["key"] = "value1" + cache["other"] = "other_value" + + # Update existing key - should move to end + cache["key"] = "value2" + + assert cache["key"] == "value2" + assert len(cache) == 2 + + def test_lru_eviction(self): + cache = LRUCache(capacity=3) + + # Fill to capacity + cache["a"] = "value_a" + cache["b"] = "value_b" + cache["c"] = "value_c" + assert len(cache) == 3 + + # Add one more - should evict 'a' (least recently used) + cache["d"] = "value_d" + assert len(cache) == 3 + assert "a" not in cache + assert "b" in cache + assert "c" in cache + assert "d" in 
cache + + def test_lru_access_updates_order(self): + cache = LRUCache(capacity=3) + + # Fill to capacity + cache["a"] = "value_a" + cache["b"] = "value_b" + cache["c"] = "value_c" + + # Access 'a' to make it most recently used + _ = cache["a"] + + # Add one more - should evict 'b' (now least recently used) + cache["d"] = "value_d" + assert "a" in cache # Should still be there + assert "b" not in cache # Should be evicted + assert "c" in cache + assert "d" in cache + + def test_lru_update_existing_moves_to_end(self): + cache = LRUCache(capacity=3) + + # Fill to capacity + cache["a"] = "value_a" + cache["b"] = "value_b" + cache["c"] = "value_c" + + # Update 'a' - should move it to end + cache["a"] = "new_value_a" + + # Add one more - should evict 'b' (now least recently used) + cache["d"] = "value_d" + assert cache["a"] == "new_value_a" # Should still be there + assert "b" not in cache # Should be evicted + assert "c" in cache + assert "d" in cache + + def test_get_method_with_default(self): + cache = LRUCache() + cache["exists"] = "value" + + assert cache.get("exists") == "value" + assert cache.get("nonexistent") is None + assert cache.get("nonexistent", "default") == "default" + + def test_get_method_updates_order(self): + cache = LRUCache(capacity=3) + + # Fill to capacity + cache["a"] = "value_a" + cache["b"] = "value_b" + cache["c"] = "value_c" + + # Use get() to access 'a' + assert cache.get("a") == "value_a" + + # Add one more - should evict 'b' (now least recently used) + cache["d"] = "value_d" + assert "a" in cache # Should still be there + assert "b" not in cache # Should be evicted + + def test_pop_method(self): + cache = LRUCache() + cache["key"] = "value" + + result = cache.pop("key") + assert result == "value" + assert "key" not in cache + + def test_pop_method_with_default(self): + cache = LRUCache() + + result = cache.pop("nonexistent", "default") + assert result == "default" + + def test_pop_method_keyerror(self): + cache = LRUCache() + with 
pytest.raises(KeyError): + cache.pop("nonexistent") + + def test_clear_method(self): + cache = LRUCache() + cache["a"] = "value_a" + cache["b"] = "value_b" + + assert len(cache) == 2 + cache.clear() + assert len(cache) == 0 + assert "a" not in cache + assert "b" not in cache + + def test_iter(self): + cache = LRUCache() + cache["a"] = "value_a" + cache["b"] = "value_b" + cache["c"] = "value_c" + + keys = list(cache) + assert set(keys) == {"a", "b", "c"} + assert len(keys) == 3 + + def test_iter_is_iterator(self): + cache = LRUCache() + cache["a"] = "value_a" + + assert isinstance(iter(cache), Iterator) + + def test_contains(self): + cache = LRUCache() + cache["exists"] = "value" + + assert "exists" in cache + assert "nonexistent" not in cache + + def test_repr(self): + cache = LRUCache(capacity=64) + cache["a"] = 1 + cache["b"] = 2 + + repr_str = repr(cache) + assert "LRUCache" in repr_str + assert "capacity=64" in repr_str + # Should contain the dict contents + assert "'a': 1" in repr_str or "'b': 2" in repr_str + + def test_capacity_zero_behavior(self): + cache = LRUCache(capacity=0) + + # Should immediately evict anything added + cache["key"] = "value" + assert len(cache) == 0 + assert "key" not in cache + + def test_capacity_one_behavior(self): + cache = LRUCache(capacity=1) + + cache["a"] = "value_a" + assert len(cache) == 1 + assert "a" in cache + + # Adding another should evict the first + cache["b"] = "value_b" + assert len(cache) == 1 + assert "a" not in cache + assert "b" in cache + + def test_large_capacity(self): + cache = LRUCache(capacity=1000) + + # Add many items + for i in range(500): + cache[f"key_{i}"] = f"value_{i}" + + assert len(cache) == 500 + # All should still be there + for i in range(500): + assert f"key_{i}" in cache + + def test_mixed_key_types(self): + cache = LRUCache() + + cache["string_key"] = "string_value" + cache[42] = "int_key_value" + cache[("tuple", "key")] = "tuple_key_value" + + assert cache["string_key"] == "string_value" + 
assert cache[42] == "int_key_value" + assert cache[("tuple", "key")] == "tuple_key_value" + + def test_complex_eviction_scenario(self): + cache = LRUCache(capacity=4) + + # Fill cache + for i in range(4): + cache[f"key_{i}"] = f"value_{i}" + + # Access keys in different order to change LRU order + _ = cache["key_1"] # key_1 becomes most recent + _ = cache["key_3"] # key_3 becomes most recent + cache["key_0"] = "updated_value_0" # key_0 becomes most recent + + # LRU order should now be: key_2, key_1, key_3, key_0 + # Adding new item should evict key_2 + cache["new_key"] = "new_value" + + assert "key_2" not in cache # Should be evicted + assert "key_1" in cache + assert "key_3" in cache + assert cache["key_0"] == "updated_value_0" + assert cache["new_key"] == "new_value" + + +class TestCachedStore: + """Test cases for CachedStore wrapper.""" + + @pytest.fixture + def memory_store(self): + store = obstore.store.MemoryStore() + + # Add test data at empty path (what CachedStore expects for block access) + file1_data = ( + b"Hello, World! This is file1 content for testing caching behavior." + ) + store.put("", file1_data) + + # Also add named files for list operations + store.put("file1.txt", file1_data) + store.put( + "file2.txt", b"This is file2 with different content for cache testing." 
+ ) + + return store + + @pytest.fixture + def caches(self): + meta_cache = LRUCache(capacity=10) + mem_cache = LRUCache(capacity=10) + disk_cache = DiskCache() + return meta_cache, mem_cache, disk_cache + + @pytest.fixture + def cached_store(self, memory_store, caches): + meta_cache, mem_cache, disk_cache = caches + return CachedStore( + store=memory_store, + base_url="memory://test", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=32, + cache_monitor=CacheMonitor(), + ) + + def test_init(self, memory_store, caches): + meta_cache, mem_cache, disk_cache = caches + cached_store = CachedStore( + store=memory_store, + base_url="http://example.com/data", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=1024, + cache_monitor=CacheMonitor(), + ) + + assert cached_store.store is memory_store + assert cached_store.scheme == "http" + assert cached_store.base_path == "example.com/data" + assert cached_store.meta_cache is meta_cache + assert cached_store.mem_cache is mem_cache + assert cached_store.disk_cache is disk_cache + assert cached_store.block_size == 1024 + + def test_head_cache(self, cached_store): + cache_key = "memory://test/file1.txt" + assert cache_key not in cached_store.meta_cache + + # First call should miss cache and hit the underlying store + result = cached_store.head("file1.txt") + assert result["path"] == "file1.txt" + assert result["size"] == 65 + assert result["e_tag"] is not None # MemoryStore generates e_tags + + # Result should be cached + assert cache_key in cached_store.meta_cache + assert cached_store.meta_cache[cache_key] == result + assert cached_store.head("file1.txt") == result + + def test_head_file_not_found(self, cached_store): + with pytest.raises(FileNotFoundError): + cached_store.head("nonexistent.txt") + + def test_get_range_single_block(self, cached_store): + # Request data fits in one 32-byte block + data = cached_store.get_range("file1.txt", start=0, 
length=20) + expected = b"Hello, World! This i" + assert data == expected + + def test_get_range_multiple_blocks(self, cached_store): + # Request spans multiple 32-byte blocks + data = cached_store.get_range("file1.txt", start=0, length=50) + expected = b"Hello, World! This is file1 content for testing ca" + assert data == expected + + def test_get_range_partial_block(self, cached_store): + # Request data starting in the middle of a block + data = cached_store.get_range("file1.txt", start=10, length=10) + expected = b"ld! This i" + assert data == expected + + def test_get_range_with_end_parameter(self, cached_store): + data = cached_store.get_range("file1.txt", start=0, end=15) + expected = b"Hello, World! T" + assert data == expected + + def test_get_range_length_required_error(self, cached_store): + with pytest.raises(ValueError, match="Either end or length must be provided"): + cached_store.get_range("file1.txt", start=0) + + def test_block_caching_mem_cache(self, cached_store): + # First request should load blocks into cache + data1 = cached_store.get_range("file1.txt", start=0, length=32) + + # Check that block is in memory cache + cache_key = "memory://test/file1.txt.32.0" + assert cache_key in cached_store.mem_cache + + # Second request for same block should hit memory cache + data2 = cached_store.get_range("file1.txt", start=0, length=32) + + assert data1 == data2 + + def test_block_caching_disk_cache(self, cached_store): + # Load a block + data1 = cached_store.get_range("file1.txt", start=0, length=32) + cache_key = "memory://test/file1.txt.32.0" + + # Block should be in both memory and disk cache + assert cache_key in cached_store.mem_cache + assert cache_key in cached_store.disk_cache + + # Remove from memory cache to test disk cache hit + del cached_store.mem_cache[cache_key] + assert cache_key not in cached_store.mem_cache + + # Request same block - should hit disk cache + data2 = cached_store.get_range("file1.txt", start=0, length=32) + assert data1 
== data2 + + # Block should be back in memory cache + assert cache_key in cached_store.mem_cache + + def test_cache_key_generation(self, cached_store): + # Test that cache keys are generated correctly for different operations + data1 = cached_store.get_range("file1.txt", start=0, length=32) + data2 = cached_store.get_range("file1.txt", start=32, length=32) + + # Should have separate cache entries for different blocks + cache_key1 = "memory://test/file1.txt.32.0" # Block 0 + cache_key2 = "memory://test/file1.txt.32.1" # Block 1 + + assert cache_key1 in cached_store.mem_cache + assert cache_key2 in cached_store.mem_cache + + # Different blocks should have different content + assert data1 != data2 + + def test_list_with_delimiter_returns_correct_structure(self, cached_store): + result = cached_store.list_with_delimiter() + file_paths = {obj["path"] for obj in result["objects"]} + assert len(file_paths) == 2 + assert "file1.txt" in file_paths + assert "file2.txt" in file_paths + + # Verify result structure matches expected format + assert "objects" in result + assert "common_prefixes" in result + assert isinstance(result["objects"], list) + assert isinstance(result["common_prefixes"], list) + + def test_path_prefix_handling(self, memory_store, caches): + meta_cache, mem_cache, disk_cache = caches + cached_store = CachedStore( + store=memory_store, + base_url="memory://example.com/data/files/", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=32, + cache_monitor=CacheMonitor(), + ) + _ = cached_store.head("file1.txt") + + # Cache key should be normalized + cache_key = "memory://example.com/data/files/file1.txt" + assert cache_key in cached_store.meta_cache + + def test_block_size_configuration(self, memory_store, caches): + meta_cache, mem_cache, disk_cache = caches + cached_store = CachedStore( + store=memory_store, + base_url="memory://test", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=16, 
# Smaller block size + cache_monitor=CacheMonitor(), + ) + _ = cached_store.get_range("file1.txt", start=0, length=20) + + # Should create cache keys with the correct block size + cache_key = "memory://test/file1.txt.16.0" + assert ( + cache_key in cached_store.mem_cache or cache_key in cached_store.disk_cache + ) + + def test_scheme_based_cache_isolation(self, memory_store, caches): + meta_cache, mem_cache, disk_cache = caches + + # Create two CachedStore instances with different schemes + http_store = CachedStore( + store=memory_store, + base_url="http://example.com/test", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=32, + cache_monitor=CacheMonitor(), + ) + + s3_store = CachedStore( + store=memory_store, + base_url="s3://example.com/test", + meta_cache=meta_cache, + mem_cache=mem_cache, + disk_cache=disk_cache, + block_size=32, + cache_monitor=CacheMonitor(), + ) + + # Access the same file through both stores + http_store.get_range("file1.txt", start=0, length=32) + s3_store.get_range("file1.txt", start=0, length=32) + + # Both stores should have separate cache entries + http_cache_key = "http://example.com/test/file1.txt.32.0" + s3_cache_key = "s3://example.com/test/file1.txt.32.0" + + assert http_cache_key in mem_cache + assert s3_cache_key in mem_cache + assert http_cache_key != s3_cache_key + + # Same for metadata cache + http_meta_key = "http://example.com/test/file1.txt" + s3_meta_key = "s3://example.com/test/file1.txt" + + http_store.head("file1.txt") + s3_store.head("file1.txt") + + assert http_meta_key in meta_cache + assert s3_meta_key in meta_cache + assert http_meta_key != s3_meta_key diff --git a/tests/test_ftp_store.py b/tests/test_ftp_store.py new file mode 100644 index 0000000..d5f9f38 --- /dev/null +++ b/tests/test_ftp_store.py @@ -0,0 +1,325 @@ +import ftplib +from datetime import datetime +from unittest.mock import Mock, patch + +import pytest + +from simple_httpfs._ftp import ( + FTPStore, + 
_modify_time_to_datetime, + _resolve_path, + _resolve_search_dir, +) + + +class MockFTP: + """Mock FTP server for testing.""" + + def __init__(self, host): + self.host = host + self.files = { + "/test/file.txt": b"Hello, World! This is a test file content.", + "/test/large.txt": b"A" * 10000, # 10KB file + "/test/empty.txt": b"", + "/test/subdir/foo.txt": b"foo content", + "/test/subdir/bar.txt": b"bar content", + } + # Mock MLSD responses for different directories + self.directories = { + "/test": [ + ( + "file.txt", + {"size": "42", "modify": "20231201123000", "type": "file"}, + ), + ( + "large.txt", + {"size": "10000", "modify": "20231202143000", "type": "file"}, + ), + ( + "empty.txt", + {"size": "0", "modify": "20231203153000", "type": "file"}, + ), + ("subdir", {"type": "dir"}), + ], + "/test/subdir": [ + ("foo.txt", {"size": "11", "modify": "20231204123000", "type": "file"}), + ("bar.txt", {"size": "11", "modify": "20231205123000", "type": "file"}), + ], + } + + def login(self): + pass + + def voidcmd(self, cmd): + if cmd == "TYPE I": + return "200 Type set to I" + return "200 OK" + + def size(self, path): + if path in self.files: + return len(self.files[path]) + raise ftplib.error_perm("550 File not found") + + def sendcmd(self, cmd): + if cmd.startswith("MDTM "): + path = cmd[5:] + if path in self.files: + return "213 20231201123000" + raise ftplib.error_perm("550 File not found") + return "200 OK" + + def transfercmd(self, cmd, rest=0): + if cmd.startswith("RETR "): + path = cmd[5:] + if path in self.files: + data = self.files[path][rest:] + mock_conn = Mock() + mock_conn.recv = Mock(side_effect=self._create_recv_func(data)) + return mock_conn + raise ftplib.error_perm("550 File not found") + raise ftplib.error_perm("502 Command not implemented") + + def _create_recv_func(self, data): + chunks = [data[i : i + 1024] for i in range(0, len(data), 1024)] + chunks.append(b"") # EOF + return lambda size: chunks.pop(0) if chunks else b"" + + def mlsd(self, 
path): + # Normalize path for lookup (handle both /test and /test/) + normalized_path = path.rstrip("/") + if normalized_path in self.directories: + return self.directories[normalized_path] + # Also try with trailing slash + if path in self.directories: + return self.directories[path] + raise ftplib.error_perm("550 Directory not found") + + def close(self): + pass + + +@pytest.fixture +def mock_ftp(): + with patch("simple_httpfs._ftp.FTP") as mock_ftp_class: + mock_instance = MockFTP("ftp.example.com") + mock_ftp_class.return_value = mock_instance + yield mock_instance + + +@pytest.fixture +def ftp_store(): + return FTPStore("ftp://ftp.example.com/test/") + + +class TestFTPStore: + """Test cases for FTPStore implementation.""" + + def test_init(self): + store = FTPStore("ftp://ftp.example.com/path/to/files/") + assert store.host == "ftp.example.com" + assert store.path == "/path/to/files/" + assert store.chunk_size == 32 * 1024 + + def test_init_custom_chunk_size(self): + store = FTPStore("ftp://ftp.example.com/", chunk_size=8192) + assert store.chunk_size == 8192 + + def test_head_success(self, ftp_store, mock_ftp): + metadata = ftp_store.head("file.txt") + + assert metadata["path"] == "/test/file.txt" + assert metadata["size"] == 42 + assert isinstance(metadata["last_modified"], datetime) + assert metadata["e_tag"] is None + assert metadata["version"] is None + + def test_head_file_not_found(self, ftp_store, mock_ftp): + mock_ftp.size = Mock(side_effect=ftplib.error_perm("550 File not found")) + + with pytest.raises(FileNotFoundError): + ftp_store.head("nonexistent.txt") + + def test_get_range_full_file(self, ftp_store, mock_ftp): + data = ftp_store.get_range("file.txt", start=0, length=42) + assert data == b"Hello, World! This is a test file content." 
+ + def test_get_range_partial(self, ftp_store, mock_ftp): + data = ftp_store.get_range("file.txt", start=7, length=5) + assert data == b"World" + + def test_get_range_with_end(self, ftp_store, mock_ftp): + data = ftp_store.get_range("file.txt", start=0, end=12) + assert data == b"Hello, World" + + def test_get_range_beyond_file_size(self, ftp_store, mock_ftp): + # When start > file size, should return empty data + mock_ftp.size = Mock(return_value=10) + data = ftp_store.get_range("file.txt", start=20, length=10) + assert data == b"" + + def test_list_with_delimiter_no_prefix(self, ftp_store, mock_ftp): + """Test listing root directory without prefix.""" + result = ftp_store.list_with_delimiter(None) + + assert "common_prefixes" in result + assert "objects" in result + + # Should have one directory (subdir/) + assert "subdir/" in result["common_prefixes"] + + # Should have three files + file_names = [obj["path"] for obj in result["objects"]] + assert "file.txt" in file_names + assert "large.txt" in file_names + assert "empty.txt" in file_names + + # Check file metadata + file_obj = next(obj for obj in result["objects"] if obj["path"] == "file.txt") + assert file_obj["size"] == 42 + assert isinstance(file_obj["last_modified"], datetime) + + def test_list_with_delimiter_with_prefix(self, ftp_store, mock_ftp): + """Test listing with prefix filter.""" + result = ftp_store.list_with_delimiter("sub") + + # Should filter to items starting with "sub" + assert len(result["common_prefixes"]) == 1 + assert "subdir/" in result["common_prefixes"] + assert len(result["objects"]) == 0 # No files start with "sub" + + def test_list_with_delimiter_subdir(self, ftp_store, mock_ftp): + """Test listing subdirectory.""" + result = ftp_store.list_with_delimiter("subdir/") + + # Should list contents of subdir + assert len(result["common_prefixes"]) == 0 # No subdirectories in subdir + assert len(result["objects"]) == 2 # Two files in subdir + + file_names = [obj["path"] for obj in 
result["objects"]] + assert "subdir/foo.txt" in file_names + assert "subdir/bar.txt" in file_names + + def test_list_with_delimiter_mlsd_not_supported(self, ftp_store, mock_ftp): + """Test fallback when MLSD is not supported.""" + mock_ftp.mlsd = Mock(side_effect=ftplib.error_perm("500 MLSD not supported")) + + result = ftp_store.list_with_delimiter(None) + + # Should return empty results when MLSD fails + assert result["common_prefixes"] == [] + assert result["objects"] == [] + + def test_list_with_delimiter_timestamp_parsing(self, ftp_store, mock_ftp): + """Test parsing of MLSD timestamp with fractional seconds.""" + mock_ftp.directories["/test"] = [ + ( + "test.txt", + {"size": "100", "modify": "20231201123000.123", "type": "file"}, + ) + ] + + result = ftp_store.list_with_delimiter(None) + file_obj = result["objects"][0] + + # Should parse correctly, ignoring fractional seconds + expected_time = datetime(2023, 12, 1, 12, 30, 0) + assert file_obj["last_modified"] == expected_time + + def test_list_with_delimiter_no_modify_time(self, ftp_store, mock_ftp): + """Test when modify time is not available.""" + mock_ftp.directories["/test"] = [ + ("test.txt", {"size": "100", "type": "file"}) # No modify field + ] + + with patch("simple_httpfs._ftp.datetime") as mock_datetime: + mock_now = datetime(2023, 12, 1, 15, 0, 0) + mock_datetime.now.return_value = mock_now + mock_datetime.strptime = datetime.strptime + + result = ftp_store.list_with_delimiter(None) + file_obj = result["objects"][0] + assert file_obj["last_modified"] == mock_now + + def test_list_with_delimiter_skips_dot_entries(self, ftp_store, mock_ftp): + """Test that . and .. entries are filtered out.""" + mock_ftp.directories["/test"] = [ + (".", {"type": "dir"}), + ("..", {"type": "dir"}), + ("file.txt", {"size": "42", "modify": "20231201123000", "type": "file"}), + ] + + result = ftp_store.list_with_delimiter(None) + + # Should only have the actual file, not . or .. 
+ assert len(result["objects"]) == 1 + assert result["objects"][0]["path"] == "file.txt" + assert len(result["common_prefixes"]) == 0 + + +class TestHelperFunctions: + """Test the helper functions.""" + + def test_resolve_search_dir_no_prefix(self): + result = _resolve_search_dir("/base/path", None) + assert result == "/base/path" + + def test_resolve_search_dir_with_directory_prefix(self): + result = _resolve_search_dir("/base/path", "subdir/") + assert result == "/base/path/subdir" + + def test_resolve_search_dir_with_file_prefix(self): + result = _resolve_search_dir("/base/path", "file.txt") + assert result == "/base/path" + + def test_resolve_search_dir_with_nested_file_prefix(self): + result = _resolve_search_dir("/base/path", "subdir/file.txt") + assert result == "/base/path/subdir" + + def test_resolve_path_no_prefix(self): + result = _resolve_path("file.txt", None) + assert result == "file.txt" + + def test_resolve_path_with_directory_prefix(self): + result = _resolve_path("file.txt", "subdir/") + assert result == "subdir/file.txt" + + result = _resolve_path("file.txt", "sub/dir/") + assert result == "sub/dir/file.txt" + + def test_resolve_path_with_incomplete_prefix(self): + result = _resolve_path("file.txt", "fi") + assert result == "file.txt" + + result = _resolve_path("file.txt", "subdir/fi") + assert result == "subdir/file.txt" + + result = _resolve_path("file.txt", "sub/dir/file.txt") + assert result == "sub/dir/file.txt" + + def test_resolve_path_with_bad_prefix(self): + result = _resolve_path("file.txt", "bad") + assert result is None + + result = _resolve_path("file.txt", "sub/dir/bad") + assert result is None + + def test_modify_time_to_datetime_basic(self): + result = _modify_time_to_datetime("20231201123000") + expected = datetime(2023, 12, 1, 12, 30, 0) + assert result == expected + + def test_modify_time_to_datetime_with_fractional_seconds(self): + result = _modify_time_to_datetime("20231201123000.123") + expected = datetime(2023, 12, 1, 12, 
30, 0) + assert result == expected + + def test_path_joining(self, mock_ftp): + # Test various URL path scenarios + store1 = FTPStore("ftp://ftp.example.com/") + assert store1.path == "/" + + store2 = FTPStore("ftp://ftp.example.com/data/files/") + assert store2.path == "/data/files/" + + store3 = FTPStore("ftp://ftp.example.com/data/files") + assert store3.path == "/data/files" diff --git a/tests/test_httpfs.py b/tests/test_httpfs.py new file mode 100644 index 0000000..5785dbb --- /dev/null +++ b/tests/test_httpfs.py @@ -0,0 +1,339 @@ +from datetime import datetime +from stat import S_IFDIR, S_IFREG +from unittest.mock import Mock, patch + +import obstore +import pytest +from fuse import FuseOSError + +from simple_httpfs._ftp import FTPStore +from simple_httpfs.httpfs import HttpFs, load_store, path_to_url + + +class TestHelperFunctions: + """Test cases for helper functions.""" + + def test_path_to_url_root_path(self): + result = path_to_url("/", "...") + assert result is None + + def test_path_to_url_no_sentinel(self): + result = path_to_url("/some/path", "...") + assert result is None + + def test_path_to_url_simple_http(self): + result = path_to_url("/http:/example.com/file...", "...") + assert result == "http://example.com/file" + + def test_path_to_url_nested_path(self): + result = path_to_url("/http:/example.com.../data/file.txt...", "...") + assert result == "http://example.com/data/file.txt" + + def test_path_to_url_s3_with_bucket(self): + result = path_to_url("/s3:/my-bucket.../path/to/file.txt...", "...") + assert result == "s3://my-bucket/path/to/file.txt" + + def test_path_to_url_removes_sentinels_from_parent_dirs(self): + result = path_to_url( + "/http:/example.com.../dir.../subdir.../file.txt...", "..." 
+ ) + assert result == "http://example.com/dir/subdir/file.txt" + + def test_load_store_http(self): + store = load_store("http://example.com/file.txt") + assert isinstance(store, obstore.store.HTTPStore) + + def test_load_store_https(self): + store = load_store("https://example.com/file.txt") + assert isinstance(store, obstore.store.HTTPStore) + + def test_load_store_ftp(self): + store = load_store("ftp://example.com/file.txt") + assert isinstance(store, FTPStore) + + def test_load_store_with_client_options(self): + client_options = {"timeout": "30s"} # obstore expects string format + store = load_store("http://example.com/file.txt", client_options=client_options) + assert isinstance(store, obstore.store.HTTPStore) + + @patch("obstore.store.from_url") + def test_load_store_other_schemes(self, mock_from_url): + mock_store = Mock() + mock_from_url.return_value = mock_store + + store = load_store("s3://bucket/file.txt") + assert store == mock_store + mock_from_url.assert_called_once_with( + "s3://bucket/file.txt", + config={"skip_signature": True}, + client_options=None, + retry_config=None, + credential_provider=None, + ) + + +class TestHttpFs: + """Test cases for HttpFs FUSE operations.""" + + @pytest.fixture + def memory_store(self): + store = obstore.store.MemoryStore() + + # Add test files + file1_data = b"Hello, World! This is test file content for HttpFs testing." + file2_data = b"Another test file with different content for verification." 
+ + store.put("file1.txt", file1_data) + store.put("file2.txt", file2_data) + store.put("subdir/nested.txt", b"Nested file content") + + return store + + @pytest.fixture + def httpfs(self, tmp_path): + return HttpFs( + sentinel="...", + block_size=64, # Small block size for testing + disk_cache_size=1024 * 1024, # 1MB cache + disk_cache_dir=str(tmp_path / "cache"), + lru_capacity=10, + ) + + @patch("simple_httpfs.httpfs.load_store") + def test_getattr_root_directory(self, mock_load_store, httpfs): + # Root path should return directory attributes, store not used + attrs = httpfs.getattr("/") + + assert attrs["st_mode"] == (S_IFDIR | 0o555) + assert attrs["st_nlink"] == 2 + mock_load_store.assert_not_called() + + @patch("simple_httpfs.httpfs.load_store") + def test_getattr_directory_without_sentinel(self, mock_load_store, httpfs): + # Path without sentinel should return directory attrs, store not used + attrs = httpfs.getattr("/http:/example.com/some/path") + + assert attrs["st_mode"] == (S_IFDIR | 0o555) + assert attrs["st_nlink"] == 2 + mock_load_store.assert_not_called() + + def test_getattr_file_success(self, httpfs): + # Mock store with file metadata + mock_cached_store = Mock() + mock_cached_store.head.return_value = { + "e_tag": "abc123", + "last_modified": datetime(2023, 1, 1, 12, 0, 0), + "size": 59, + "path": "file1.txt", + } + with patch.object(httpfs, "_load_cached_store", return_value=mock_cached_store): + attrs = httpfs.getattr("/http:/file1.txt...") + + assert attrs["st_mode"] == (S_IFREG | 0o644) + assert attrs["st_nlink"] == 1 + assert attrs["st_size"] == 59 + assert "st_atime" in attrs + assert "st_mtime" in attrs + assert "st_ctime" in attrs + + @patch("simple_httpfs.httpfs.load_store") + def test_getattr_file_not_found(self, mock_load_store, httpfs): + # Mock store that raises FileNotFoundError + mock_cached_store = Mock() + mock_cached_store.head.side_effect = FileNotFoundError() + + with patch.object(httpfs, "_load_cached_store", 
return_value=mock_cached_store): + attrs = httpfs.getattr("/http:/nonexistent.txt...") + + # Should return directory attributes on file not found + assert attrs["st_mode"] == (S_IFDIR | 0o555) + assert attrs["st_nlink"] == 2 + + @patch("simple_httpfs.httpfs.load_store") + def test_readdir_root_directory(self, mock_load_store, httpfs): + # Root directory should return minimal listing + contents = httpfs.readdir("/") + + assert contents == [".", ".."] + mock_load_store.assert_not_called() + + @patch("simple_httpfs.httpfs.load_store") + def test_readdir_directory_without_sentinel(self, mock_load_store, httpfs): + # Directory without sentinel should return minimal listing + contents = httpfs.readdir("/http:/example.com/some/path") + + assert contents == [".", ".."] + mock_load_store.assert_not_called() + + @patch("simple_httpfs.httpfs.load_store") + def test_readdir_with_files_and_dirs(self, mock_load_store, httpfs): + # Mock store with files and directories + mock_cached_store = Mock() + mock_cached_store.list_with_delimiter.return_value = { + "common_prefixes": ["subdir/", "another_dir/"], + "objects": [{"path": "file1.txt"}, {"path": "file2.txt"}], + } + + with patch.object(httpfs, "_load_cached_store", return_value=mock_cached_store): + contents = httpfs.readdir("/s3:/mybucket/...") + + expected = [ + ".", + "..", + "subdir/...", + "another_dir/...", # Directories with sentinel + "file1.txt...", + "file2.txt...", # Files with sentinel + ] + assert contents == expected + + @patch("simple_httpfs.httpfs.load_store") + def test_read_root_directory(self, mock_load_store, httpfs): + # Reading root should return empty + data = httpfs.read("/", 100, 0) + + assert data == b"" + mock_load_store.assert_not_called() + + @patch("simple_httpfs.httpfs.load_store") + def test_read_directory_without_sentinel(self, mock_load_store, httpfs): + # Reading directory without sentinel should return empty + data = httpfs.read("/http:/example.com/some/path", 100, 0) + + assert data == b"" + 
mock_load_store.assert_not_called() + + @patch("simple_httpfs.httpfs.load_store") + def test_read_file_success(self, mock_load_store, httpfs): + # Mock cached store with file data + mock_cached_store = Mock() + test_data = b"Hello, World! This is test content." + mock_cached_store.get_range.return_value = test_data[:20] # First 20 bytes + + with patch.object(httpfs, "_load_cached_store", return_value=mock_cached_store): + data = httpfs.read("/s3:/bucket/file1.txt...", 20, 0) + + assert data == test_data[:20] + mock_cached_store.get_range.assert_called_once_with( + "s3://bucket/file1.txt", start=0, length=20 + ) + + @patch("simple_httpfs.httpfs.load_store") + def test_read_file_with_offset(self, mock_load_store, httpfs): + # Test reading with offset + mock_cached_store = Mock() + test_data = b"0123456789abcdefghij" + mock_cached_store.get_range.return_value = test_data[5:15] # Bytes 5-14 + + with patch.object(httpfs, "_load_cached_store", return_value=mock_cached_store): + data = httpfs.read("/http:/example.com/file1.txt...", 10, 5) + + assert data == test_data[5:15] + mock_cached_store.get_range.assert_called_once_with( + "http://example.com/file1.txt", start=5, length=10 + ) + + def test_init_with_custom_parameters(self, tmp_path): + cache_dir = str(tmp_path / "custom_cache") + + fs = HttpFs( + sentinel="EOL", + block_size=128, + disk_cache_size=2 * 1024 * 1024, + disk_cache_dir=cache_dir, + lru_capacity=50, + store_configs={"s3": {"region": "us-west-2"}}, + client_options={"timeout": "60s"}, + retry_config={"max_retries": 3}, + ) + + assert fs.sentinel == "EOL" + assert fs.block_size == 128 + assert fs.meta_cache.capacity == 50 + assert fs.mem_cache.capacity == 50 + assert fs.store_configs == {"s3": {"region": "us-west-2"}} + assert fs.client_options == {"timeout": "60s"} + assert fs.retry_config == {"max_retries": 3} + + def test_load_cached_store_creates_correct_store(self, httpfs): + with patch("simple_httpfs.httpfs.load_store") as mock_load_store: + 
mock_store = Mock() + mock_load_store.return_value = mock_store + + result = httpfs._load_cached_store("http://example.com/file.txt") + + # Should create a CachedStore with correct parameters + assert hasattr(result, "store") + assert hasattr(result, "scheme") + assert hasattr(result, "base_path") + + mock_load_store.assert_called_once_with( + "http://example.com/file.txt", + configs={}, + credential_providers={}, + client_options=None, + retry_config=None, + ) + + def test_load_cached_store_invalid_url(self, httpfs): + with pytest.raises(FuseOSError): + httpfs._load_cached_store("invalid-url-without-scheme") + + def test_statfs_returns_fake_filesystem_stats(self, httpfs): + stats = httpfs.statfs("/any/path") + + expected_block_size = 128 * 1024 + assert stats["f_frsize"] == expected_block_size + assert stats["f_bsize"] == expected_block_size + assert stats["f_blocks"] == 1024 * 1024 + assert stats["f_bfree"] == 512 * 1024 + assert stats["f_bavail"] == 512 * 1024 + assert stats["f_namemax"] == 8192 + + def test_write_operations_return_zero(self, httpfs): + # All write operations should return 0 (read-only filesystem) + result = httpfs.write("/any/path", b"data", 4, 0, None) + assert result == 0 + + def test_unimplemented_operations_are_no_ops(self, httpfs): + # These should not raise exceptions + httpfs.link("target", "source") + httpfs.symlink("target", "source") + httpfs.unlink("/path") + + @patch("simple_httpfs.httpfs.load_store") + def test_caching_behavior_with_same_url(self, mock_load_store, httpfs): + # Test that repeated access to same URL uses caching effectively + mock_store = Mock() + mock_load_store.return_value = mock_store + + # Mock the cached store behavior + mock_cached_store = Mock() + mock_cached_store.head.return_value = { + "path": "file.txt", + "size": 100, + "last_modified": datetime.now(), + "e_tag": "test-etag", + } + + with patch.object( + httpfs, "_load_cached_store", return_value=mock_cached_store + ) as mock_load_cached: + # First call 
+ attrs1 = httpfs.getattr("/http:/example.com/file.txt...") + # Second call to same URL + attrs2 = httpfs.getattr("/http:/example.com/file.txt...") + + # Both should succeed + assert attrs1["st_mode"] == (S_IFREG | 0o644) + assert attrs2["st_mode"] == (S_IFREG | 0o644) + + # _load_cached_store should be called for each operation + # (caching happens at the CachedStore level, not HttpFs level) + assert mock_load_cached.call_count == 2 + + def test_destroy_closes_disk_cache(self, httpfs): + # Mock the disk cache to verify close is called + with patch.object(httpfs.disk_cache, "close") as mock_close: + httpfs.destroy("/") + mock_close.assert_called_once() diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..886432f --- /dev/null +++ b/uv.lock @@ -0,0 +1,454 @@ +version = 1 +revision = 2 +requires-python = ">=3.10" + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662, upload-time = "2025-08-23T14:42:44.78Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/af/70/e77b0061a6c7157bfce645c6b9a715a08d4c86b3360a7b3252818080b817/coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801", size = 216774, upload-time = "2025-08-23T14:40:26.301Z" }, + { url = "https://files.pythonhosted.org/packages/91/08/2a79de5ecf37ee40f2d898012306f11c161548753391cec763f92647837b/coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a", size = 217175, upload-time = "2025-08-23T14:40:29.142Z" }, + { url = "https://files.pythonhosted.org/packages/64/57/0171d69a699690149a6ba6a4eb702814448c8d617cf62dbafa7ce6bfdf63/coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754", size = 243931, upload-time = "2025-08-23T14:40:30.735Z" }, + { url = "https://files.pythonhosted.org/packages/15/06/3a67662c55656702bd398a727a7f35df598eb11104fcb34f1ecbb070291a/coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33", size = 245740, upload-time = "2025-08-23T14:40:32.302Z" }, + { url = "https://files.pythonhosted.org/packages/00/f4/f8763aabf4dc30ef0d0012522d312f0b7f9fede6246a1f27dbcc4a1e523c/coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f", size = 247600, upload-time = "2025-08-23T14:40:33.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/31/6632219a9065e1b83f77eda116fed4c76fb64908a6a9feae41816dab8237/coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9", size = 245640, upload-time = "2025-08-23T14:40:35.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/e2/3dba9b86037b81649b11d192bb1df11dde9a81013e434af3520222707bc8/coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3", size = 243659, upload-time = "2025-08-23T14:40:36.815Z" }, + { url = "https://files.pythonhosted.org/packages/02/b9/57170bd9f3e333837fc24ecc88bc70fbc2eb7ccfd0876854b0c0407078c3/coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879", size = 244537, upload-time = "2025-08-23T14:40:38.737Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1c/93ac36ef1e8b06b8d5777393a3a40cb356f9f3dab980be40a6941e443588/coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8", size = 219285, upload-time = "2025-08-23T14:40:40.342Z" }, + { url = "https://files.pythonhosted.org/packages/30/95/23252277e6e5fe649d6cd3ed3f35d2307e5166de4e75e66aa7f432abc46d/coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff", size = 220185, upload-time = "2025-08-23T14:40:42.026Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f2/336d34d2fc1291ca7c18eeb46f64985e6cef5a1a7ef6d9c23720c6527289/coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2", size = 216890, upload-time = "2025-08-23T14:40:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/92448b07cc1cf2b429d0ce635f59cf0c626a5d8de21358f11e92174ff2a6/coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f", size = 217287, upload-time = "2025-08-23T14:40:45.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/ba/ad5b36537c5179c808d0ecdf6e4aa7630b311b3c12747ad624dcd43a9b6b/coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab", size = 247683, upload-time = "2025-08-23T14:40:46.791Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/fe3bbc8d097029d284b5fb305b38bb3404895da48495f05bff025df62770/coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c", size = 249614, upload-time = "2025-08-23T14:40:48.082Z" }, + { url = "https://files.pythonhosted.org/packages/69/9c/a1c89a8c8712799efccb32cd0a1ee88e452f0c13a006b65bb2271f1ac767/coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1", size = 251719, upload-time = "2025-08-23T14:40:49.349Z" }, + { url = "https://files.pythonhosted.org/packages/e9/be/5576b5625865aa95b5633315f8f4142b003a70c3d96e76f04487c3b5cc95/coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78", size = 249411, upload-time = "2025-08-23T14:40:50.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/e39a113d4209da0dbbc9385608cdb1b0726a4d25f78672dc51c97cfea80f/coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df", size = 247466, upload-time = "2025-08-23T14:40:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/40/cb/aebb2d8c9e3533ee340bea19b71c5b76605a0268aa49808e26fe96ec0a07/coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6", size = 248104, upload-time = "2025-08-23T14:40:54.064Z" }, + { 
url = "https://files.pythonhosted.org/packages/08/e6/26570d6ccce8ff5de912cbfd268e7f475f00597cb58da9991fa919c5e539/coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf", size = 219327, upload-time = "2025-08-23T14:40:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/79/79/5f48525e366e518b36e66167e3b6e5db6fd54f63982500c6a5abb9d3dfbd/coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50", size = 220213, upload-time = "2025-08-23T14:40:56.724Z" }, + { url = "https://files.pythonhosted.org/packages/40/3c/9058128b7b0bf333130c320b1eb1ae485623014a21ee196d68f7737f8610/coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82", size = 218893, upload-time = "2025-08-23T14:40:58.011Z" }, + { url = "https://files.pythonhosted.org/packages/27/8e/40d75c7128f871ea0fd829d3e7e4a14460cad7c3826e3b472e6471ad05bd/coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9", size = 217077, upload-time = "2025-08-23T14:40:59.329Z" }, + { url = "https://files.pythonhosted.org/packages/18/a8/f333f4cf3fb5477a7f727b4d603a2eb5c3c5611c7fe01329c2e13b23b678/coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b", size = 217310, upload-time = "2025-08-23T14:41:00.628Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2c/fbecd8381e0a07d1547922be819b4543a901402f63930313a519b937c668/coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c", size = 248802, upload-time = "2025-08-23T14:41:02.012Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/bc/1011da599b414fb6c9c0f34086736126f9ff71f841755786a6b87601b088/coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a", size = 251550, upload-time = "2025-08-23T14:41:03.438Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6f/b5c03c0c721c067d21bc697accc3642f3cef9f087dac429c918c37a37437/coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6", size = 252684, upload-time = "2025-08-23T14:41:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/d474bc300ebcb6a38a1047d5c465a227605d6473e49b4e0d793102312bc5/coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a", size = 250602, upload-time = "2025-08-23T14:41:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/4a/2d/548c8e04249cbba3aba6bd799efdd11eee3941b70253733f5d355d689559/coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a", size = 248724, upload-time = "2025-08-23T14:41:08.429Z" }, + { url = "https://files.pythonhosted.org/packages/e2/96/a7c3c0562266ac39dcad271d0eec8fc20ab576e3e2f64130a845ad2a557b/coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34", size = 250158, upload-time = "2025-08-23T14:41:09.749Z" }, + { url = "https://files.pythonhosted.org/packages/f3/75/74d4be58c70c42ef0b352d597b022baf12dbe2b43e7cb1525f56a0fb1d4b/coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf", size = 219493, upload-time = "2025-08-23T14:41:11.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/08/364e6012d1d4d09d1e27437382967efed971d7613f94bca9add25f0c1f2b/coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f", size = 220302, upload-time = "2025-08-23T14:41:12.449Z" }, + { url = "https://files.pythonhosted.org/packages/db/d5/7c8a365e1f7355c58af4fe5faf3f90cc8e587590f5854808d17ccb4e7077/coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8", size = 218936, upload-time = "2025-08-23T14:41:13.872Z" }, + { url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106, upload-time = "2025-08-23T14:41:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353, upload-time = "2025-08-23T14:41:16.656Z" }, + { url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350, upload-time = "2025-08-23T14:41:18.128Z" }, + { url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955, upload-time = "2025-08-23T14:41:19.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230, upload-time = "2025-08-23T14:41:20.959Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387, upload-time = "2025-08-23T14:41:22.644Z" }, + { url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280, upload-time = "2025-08-23T14:41:24.061Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894, upload-time = "2025-08-23T14:41:26.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536, upload-time = "2025-08-23T14:41:27.694Z" }, + { url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330, upload-time = "2025-08-23T14:41:29.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961, upload-time = "2025-08-23T14:41:30.511Z" }, + { url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819, upload-time = "2025-08-23T14:41:31.962Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040, upload-time = "2025-08-23T14:41:33.472Z" }, + { url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374, upload-time = "2025-08-23T14:41:34.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551, upload-time = "2025-08-23T14:41:36.333Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776, upload-time = "2025-08-23T14:41:38.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326, upload-time = "2025-08-23T14:41:40.343Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090, upload-time = "2025-08-23T14:41:42.106Z" }, + { url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217, upload-time = "2025-08-23T14:41:43.591Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194, upload-time = "2025-08-23T14:41:45.051Z" }, + { url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258, upload-time = "2025-08-23T14:41:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521, upload-time = "2025-08-23T14:41:47.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090, upload-time = "2025-08-23T14:41:49.327Z" }, + { url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365, upload-time = "2025-08-23T14:41:50.796Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413, upload-time = "2025-08-23T14:41:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943, upload-time = "2025-08-23T14:41:53.922Z" }, + { url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301, upload-time = "2025-08-23T14:41:56.528Z" }, + { url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302, upload-time = "2025-08-23T14:41:58.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237, upload-time = "2025-08-23T14:41:59.703Z" }, + { url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726, upload-time = "2025-08-23T14:42:01.343Z" }, + { url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825, upload-time = "2025-08-23T14:42:03.263Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618, upload-time = "2025-08-23T14:42:05.037Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199, upload-time = "2025-08-23T14:42:06.662Z" }, + { url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833, upload-time = "2025-08-23T14:42:08.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048, upload-time = "2025-08-23T14:42:10.247Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549, upload-time = "2025-08-23T14:42:11.811Z" }, + { url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715, upload-time = "2025-08-23T14:42:13.505Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969, upload-time = "2025-08-23T14:42:15.422Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408, upload-time = "2025-08-23T14:42:16.971Z" }, + { url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168, upload-time = "2025-08-23T14:42:18.512Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317, upload-time = "2025-08-23T14:42:20.005Z" }, + { url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600, upload-time = "2025-08-23T14:42:22.027Z" }, + { url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714, upload-time = "2025-08-23T14:42:23.616Z" }, + { url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735, upload-time = "2025-08-23T14:42:25.156Z" }, + { url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736, upload-time = "2025-08-23T14:42:43.145Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = 
"2023-08-31T06:12:00.316Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "fusepy" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/0b/4506cb2e831cea4b0214d3625430e921faaa05a7fb520458c75a2dbd2152/fusepy-3.0.1.tar.gz", hash = "sha256:72ff783ec2f43de3ab394e3f7457605bf04c8cf288a2f4068b4cde141d4ee6bd", size = 11519, upload-time = "2018-09-17T00:14:52.666Z" } + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "mypy" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/a9/3d7aa83955617cdf02f94e50aab5c830d205cfa4320cf124ff64acce3a8e/mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972", size = 11003299, upload-time = "2025-07-31T07:54:06.425Z" }, + { url = "https://files.pythonhosted.org/packages/83/e8/72e62ff837dd5caaac2b4a5c07ce769c8e808a00a65e5d8f94ea9c6f20ab/mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7", size = 10125451, upload-time = "2025-07-31T07:53:52.974Z" }, + { url = "https://files.pythonhosted.org/packages/7d/10/f3f3543f6448db11881776f26a0ed079865926b0c841818ee22de2c6bbab/mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df", size = 11916211, upload-time = "2025-07-31T07:53:18.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/bf/63e83ed551282d67bb3f7fea2cd5561b08d2bb6eb287c096539feb5ddbc5/mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390", size = 12652687, upload-time = "2025-07-31T07:53:30.544Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/68f2eeef11facf597143e85b694a161868b3b006a5fbad50e09ea117ef24/mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94", size = 12896322, upload-time = "2025-07-31T07:53:50.74Z" }, + { url = "https://files.pythonhosted.org/packages/a3/87/8e3e9c2c8bd0d7e071a89c71be28ad088aaecbadf0454f46a540bda7bca6/mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b", size = 9507962, upload-time = "2025-07-31T07:53:08.431Z" }, + { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" }, + { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" }, + { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" }, + { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" }, + { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" }, + { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" }, + { url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 12744296, upload-time = "2025-07-31T07:53:03.896Z" }, + { url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" }, + { url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/79c2fac86da57c7d8c48622a05873eaab40b905096c33597462713f5af90/mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733", size = 11040037, upload-time = "2025-07-31T07:54:10.942Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c3/adabe6ff53638e3cad19e3547268482408323b1e68bf082c9119000cd049/mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd", size = 10131550, upload-time = "2025-07-31T07:53:41.307Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c5/2e234c22c3bdeb23a7817af57a58865a39753bde52c74e2c661ee0cfc640/mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0", size = 11872963, upload-time = "2025-07-31T07:53:16.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/26/c13c130f35ca8caa5f2ceab68a247775648fdcd6c9a18f158825f2bc2410/mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a", size = 12710189, upload-time = "2025-07-31T07:54:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/82/df/c7d79d09f6de8383fe800521d066d877e54d30b4fb94281c262be2df84ef/mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91", size = 12900322, upload-time = "2025-07-31T07:53:10.551Z" }, + { url = "https://files.pythonhosted.org/packages/b8/98/3d5a48978b4f708c55ae832619addc66d677f6dc59f3ebad71bae8285ca6/mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed", size = 9751879, upload-time = "2025-07-31T07:52:56.683Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = 
"obspec" +version = "0.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/94/7a9ad6927cac6ec7680e11772fb692145a05a93bafd80b84f6f0ef12f4e7/obspec-0.1.0.tar.gz", hash = "sha256:b189781a53f82ef8d6abf0c9e77fd4c46ac9f244d5a91eb35ee61c2e2b204a4a", size = 117254, upload-time = "2025-06-25T05:24:00.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/69/96feeac84ce0b871567225c78515f3b557c023e72ed9b4f1833f3662bd6b/obspec-0.1.0-py3-none-any.whl", hash = "sha256:307f0fa2c2998b324ecf0eed6a2a89049a4c40c9b1fa2b5e1af28f0ee72136b3", size = 15231, upload-time = "2025-06-25T05:23:58.735Z" }, +] + +[[package]] +name = "obstore" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/f2/573413e09e57054a08cd16779310862dcc0fa3c7e790e8384c24b8465dbf/obstore-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8d4849e3c954bbc940b949ad48dcada1b2b5accf63e6d926ed46eb0bcdbd157a", size = 3680739, upload-time = "2025-08-07T21:11:35.996Z" }, + { url = "https://files.pythonhosted.org/packages/d2/44/2ffadd22d9bccb11403e75f4240c89411085d621a50ba11d29a5aae3167f/obstore-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:398d75c1f028ebd114a720105fbddd4efc4fec41e898a65f51f739f0594a0923", size = 3400471, upload-time = "2025-08-07T21:11:38.161Z" }, + { url = "https://files.pythonhosted.org/packages/11/4a/1a6ca04ec53a5c180156dab3add3ae4660bb1df26c03d446c108d9dd8853/obstore-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96f2c1617af785bbcb9b554fcc18a85cf22dd9f1f4d91c5a987e74f3a41f108d", size = 3455444, upload-time = "2025-08-07T21:11:39.674Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/ed/82964598ee1c6b5aec11b4b031302460f3021db46301a08ad4e3b2f78fee/obstore-0.8.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75a6a0716e282dacc287ccfe299f6929e9e798edf3e766b6d0278dd8cc90dd41", size = 3686983, upload-time = "2025-08-07T21:11:41.114Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/85004df68b78bc9618d3a2afc6283692da65b584e7284ae8070312bfeb5c/obstore-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62f63b1030056859bf460237ea45722a79014fcf846a9e7d9c907185fb006a2f", size = 3957131, upload-time = "2025-08-07T21:11:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/45/cc/a05031f92a7f7b09d904ea2964ea59e19d596bddbbc1a5c9af6bd7815ff0/obstore-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18694b8169ae744b5aaedddaf3ebb81a3d6f191760e48a323f603305efac4fa0", size = 3927359, upload-time = "2025-08-07T21:11:44.042Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ad/e8c57ff027b79d4f633b070a7eb98b1b6db6feb581914944179db642f29e/obstore-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb7acec0799b0e9618aaa7d1116ee3956476bec5b617b5da19ff8090f97a339d", size = 3766835, upload-time = "2025-08-07T21:11:45.46Z" }, + { url = "https://files.pythonhosted.org/packages/04/1e/4831f68d148e7223c9aa1d434056ed19fbabf1c46e8a3291b8a1dcdfde35/obstore-0.8.0-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:9c5baa6ab2f6ac5dc82b0ba8bc6ef1857f62e96183e0e9c922f3082d748139cf", size = 3534199, upload-time = "2025-08-07T21:11:46.941Z" }, + { url = "https://files.pythonhosted.org/packages/aa/15/72ab6895f6cccde679648edf46de2f0f111637b35793f6bf1d6a04e48e93/obstore-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ca2d288afa0426ad3932adb9088d1e82efaa6ed889903e8ae8851faa32df7df1", size = 3699155, upload-time = "2025-08-07T21:11:48.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/26/1e687e5b4a00c93d42d01c5b17b6861cb1241999b638046ca51188082fc5/obstore-0.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:7ce0ad35d1dc35482da7d6919d7ac67812680c547afd454d17479e5e63593dc0", size = 3676246, upload-time = "2025-08-07T21:11:49.503Z" }, + { url = "https://files.pythonhosted.org/packages/fa/4d/48b1be8ae9f2305c8a3e315ea6ba92644e185fa371d2c8352a974aedc1df/obstore-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bee56f5a01562f0f34227e97675e2d4663c97c5be8c185c0a1072ea47f068316", size = 3764811, upload-time = "2025-08-07T21:11:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/2d/62/291c51524ba2077c3dbca5200df511f9895b3f1747bff367eead82fe3a86/obstore-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0fd20db78ad943beaef56075ca7eebf7289ba3a2f6c42797adb54fc010ddc2c7", size = 3939313, upload-time = "2025-08-07T21:11:52.615Z" }, + { url = "https://files.pythonhosted.org/packages/b9/df/3fe682c500fca433f61c07a1f42be74024008538bc9655b9ddd388b14d79/obstore-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d081bc190ecb0d76011ebf5df8200f2819840bac174dd76c67b82e75641881fe", size = 4038159, upload-time = "2025-08-07T21:11:53.743Z" }, + { url = "https://files.pythonhosted.org/packages/5a/02/ea61e85c34d56e54a2a5ac56e66a767dfbca4cc573454f3501986174eb59/obstore-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:abff29a885e7958fb009bc393855fb3fb04bd95c6754c768e1374f704b961c25", size = 3681028, upload-time = "2025-08-07T21:11:54.914Z" }, + { url = "https://files.pythonhosted.org/packages/db/92/fcf6add23cb0b107a3d196cf04561a8496ff76a03caf2b6efa7c1851b5e5/obstore-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:32647fe9238d707ae98c6337ab86c144f2432a1ca302e58759b4acec1a9f7234", size = 3401005, upload-time = "2025-08-07T21:11:56.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/2f/c81d497777db82626cfdcf8d3d885269b0c8eeeb9bce2fb7485adcec4764/obstore-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d413a0beb5a1c84b705889b1246e37f5f77b4337b2c353d40e3764ffa04c9ff", size = 3454584, upload-time = "2025-08-07T21:11:57.555Z" }, + { url = "https://files.pythonhosted.org/packages/4f/47/298a0acc8f5c5aaf4dfe2c41a23667207802baffa8ef41443b7ee059a5bb/obstore-0.8.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f3a305a5ed3b0a2355085a8e1891c552d6051d954f2364d7caa2a4e3b992662", size = 3687185, upload-time = "2025-08-07T21:11:59.168Z" }, + { url = "https://files.pythonhosted.org/packages/8a/c3/67ff71c34127a266b2399ec64255124438afc9aef476b07a00861b9fd6da/obstore-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a873d06d05a309f0e8bb047143c308af9c64fc6dae5442aa826c634120397807", size = 3957273, upload-time = "2025-08-07T21:12:00.699Z" }, + { url = "https://files.pythonhosted.org/packages/98/94/a242d90051a65c83ad9e56391faf6e3b0316d737aecc27dc465010d2ebad/obstore-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8d8c59e584006348fff5d4e4084a72a2ae94eaeecc46aa8acc59fc4b496dd20", size = 3927131, upload-time = "2025-08-07T21:12:02.305Z" }, + { url = "https://files.pythonhosted.org/packages/c2/57/4333fc09670bd3c3fc3f7d6e4115671b0946dff15509be9b261326dda7b4/obstore-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae33a2d6e014027e4929cc2ec79444a23a3cb57efbc2ab9fe86ddc16e702b7cb", size = 3766558, upload-time = "2025-08-07T21:12:03.744Z" }, + { url = "https://files.pythonhosted.org/packages/5a/d1/691e2961cf2c0ca38170a0fd9aa6a1e165d52a8bc82122dae08637d75c27/obstore-0.8.0-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:4117874b1d40857606c0cc07018984857f3b5965ef3df4ed77516c8d3675b645", size = 3534461, upload-time = "2025-08-07T21:12:04.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/43/e406105639f0216d3cb4a939162bec06e852e856e4b80437f477d51f60d2/obstore-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:749f8692d3eae7311e771186a2b3eb2b9f16c67bd661cedf0cfd4a4d3214994d", size = 3699263, upload-time = "2025-08-07T21:12:06.409Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/1f5c93777aa644b2309e19e5477b0f6322cd80cb6b0f14c7dcbf82290345/obstore-0.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ef940f02b6b31254a105748e16b19da40d3ad8f6869f3aaf298e52dbc6a61c57", size = 3675538, upload-time = "2025-08-07T21:12:07.683Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c3/0a9f73264b158505edf6d63bbff07e7ac3e057a3812bac98d3f935734d8a/obstore-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01fbab7ccad08f3fbc34b0aaa5c27d8ecfb5e1cb5917f694fa1c82b83236ea3e", size = 3765039, upload-time = "2025-08-07T21:12:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/bb/95/bd1ce67430887d2123c8361f7b2003cbde61c6288b5827c2eeecc8487542/obstore-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7ff857509ef22c30d3e8b297c2e6d2fc1ee181b6d69e35afed7c4f815e882e49", size = 3939451, upload-time = "2025-08-07T21:12:11.479Z" }, + { url = "https://files.pythonhosted.org/packages/61/ec/1558e72130739341afec53e2038dd19750051a20b9192f73c8f58d41d5b7/obstore-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:9a4cb01f985812fd6b24c06e650d41d3f626c682ed2ef2d5d67000274705e1ba", size = 4037860, upload-time = "2025-08-07T21:12:12.681Z" }, + { url = "https://files.pythonhosted.org/packages/7f/27/6b19fe6fa12312c7deb3dceaa55074fcc3796f8b6517f0450685b6133d21/obstore-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:678148bd3cbab4c1132e9a5e83272151dd4319964c166202db3bfadd0ae4d340", size = 3677531, upload-time = "2025-08-07T21:12:13.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/c3/5247cb4fc0d41edf9ce99d5956b31ba8eefc7ab647bdf499104b7de98310/obstore-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c3e55257a7322fd43a445b5c196d388a0dd97d65b1f86e4eb48e6e0b5f41ffed", size = 3389393, upload-time = "2025-08-07T21:12:15.477Z" }, + { url = "https://files.pythonhosted.org/packages/bf/12/42ca5d5fd82f726d0fb83abe3e4ef88ded5e1a226d52e3f368389bbd993c/obstore-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a7d349cd0cdd73bc1047658950fd888ae192e0bd9c38ff78f788b92fa72b0fd", size = 3457808, upload-time = "2025-08-07T21:12:16.971Z" }, + { url = "https://files.pythonhosted.org/packages/8a/82/17179f5d9d664e5b184378ada6c9a1ab8f3a106ce4503ac1c18ded7773ba/obstore-0.8.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b70321122ea74e02d8cc6689f773fcfc42f040ee108c1401e34d9a34a74256", size = 3692420, upload-time = "2025-08-07T21:12:18.119Z" }, + { url = "https://files.pythonhosted.org/packages/9d/39/4fc71a22bb104fc50c46665b72c767acfbde452ed46ddff62a145bae7891/obstore-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b90b373d5b0019d0866821cd99838c68bf7adad9bb6c929643618a765f417559", size = 3954093, upload-time = "2025-08-07T21:12:19.5Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/7901a7b0fc1442dbb9119ada616125117d58f4fefc4055e87e72debb8ba2/obstore-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbc5923ab8fd929145ab0d93e96c19623779f5530eec980f3bd7c6d33042c87f", size = 3937149, upload-time = "2025-08-07T21:12:20.754Z" }, + { url = "https://files.pythonhosted.org/packages/33/19/1a4d341d388b7ebaa0f5050555e5c984807309c71681e1033098476a0c38/obstore-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:790cb8be172e5393fc107eae32f8c563dbf542062e4d39c153e2360460475cf1", size = 3766290, upload-time = "2025-08-07T21:12:22.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/8c/2b9c8b32bea783ad3afe79b97c4e7554de843ccca3476e1b0ff3bc5b8454/obstore-0.8.0-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:84ab8e6e6204e54a0739d0b35b8bec60529bd04f1a2bef1f9fc97841baa9640d", size = 3530631, upload-time = "2025-08-07T21:12:23.949Z" }, + { url = "https://files.pythonhosted.org/packages/72/41/17a79686e8a5ae286ce02c2a4019c31f3752a9eafd6d0687f23d07759de7/obstore-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4338790f76a3419e9262e6f5ab017c21a9d37bf4c5cf788d27ea4375c30057d1", size = 3694094, upload-time = "2025-08-07T21:12:25.132Z" }, + { url = "https://files.pythonhosted.org/packages/d6/1f/025e7d508b7e4caef4878f29ed28b629d9b359d5f84a98aa1bb142c554a6/obstore-0.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:89bacc384d1bb347a9cf357ce37f632f78d0bc4a642ab3788d12363889a48078", size = 3678782, upload-time = "2025-08-07T21:12:26.393Z" }, + { url = "https://files.pythonhosted.org/packages/b3/26/043992542a97e3a1f5e2e2039fba86dff0cad5d865fab0f88e72b91804da/obstore-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:933ede27179a244427c297778e9b4b8d483e588811325c527b132bf9196f8aaf", size = 3772620, upload-time = "2025-08-07T21:12:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/93/32/b43f249f8a3bf4c35efaa2239c4c6d5191716a154d4587d36ee7e197c841/obstore-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d5753afa949abcd7f4b90e661b66ef67fd5c4c4779ad2340543c0a6dffdd4a5", size = 3936848, upload-time = "2025-08-07T21:12:28.998Z" }, + { url = "https://files.pythonhosted.org/packages/c6/af/0f986154ced82bd9a7153521650100d729a36544d773eaa01e3f500dd267/obstore-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6dc8a9fd495cbcc5af55b501ebfb97b1bb00708d3cf584f8b56049488e9ccc4a", size = 4047881, upload-time = "2025-08-07T21:12:30.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/d3/5fe5a2143967bf0d92118d83010dc896d729e57b21d00599ba4952c19a10/obstore-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1c79fdd581719131be46c69f0f70e9fb4da3e9e6f4cb9255838d67a28198cdd8", size = 3677065, upload-time = "2025-08-07T21:12:31.485Z" }, + { url = "https://files.pythonhosted.org/packages/b4/41/23d9d7c14507eed7e75aefbe13ab1097e8312a2211ba6129c26eb6178c19/obstore-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b31a94e1da14f062385fd9538be0c9a7ef7f43368fc33b019d87af5a6364bace", size = 3389589, upload-time = "2025-08-07T21:12:32.668Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9d/52f84c78e638d62432ca65e56e48fc29d19c27efaa6d61b6f33ea3045690/obstore-0.8.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26c39549d9edc162d5c434b6b12a6cd8f0f058e57b5334002d650aaa47a4e22c", size = 3457427, upload-time = "2025-08-07T21:12:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/03/39/8911d24b754bb46636c546c5a4d23ee7e551cc9675edc9e86c51f14ae502/obstore-0.8.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df501a821d298759b2ab0691c08318de30912a39e7ae2660c3d4a68b80309c1c", size = 3692278, upload-time = "2025-08-07T21:12:35.535Z" }, + { url = "https://files.pythonhosted.org/packages/fe/68/062a2a8a386c0fdfe6db6e8bf4f3574c7695cd52b2c9b9a732dcf44d522a/obstore-0.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30729866bb873d04a162975bd4b4d09b95af1ad0ee755df6cf2c99c087cc471a", size = 3953609, upload-time = "2025-08-07T21:12:37.106Z" }, + { url = "https://files.pythonhosted.org/packages/db/69/0df001372699cc58be2ca1d32896c965f4a0ae91fc5a33e346ee727d945c/obstore-0.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e02bdc01ef35ed73108b28a898e167d1ea938605e11360addaf47e36b5e488f", size = 3945800, upload-time = "2025-08-07T21:12:38.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/ea/a8cfb814b9575fc5e78ff4bdd3329bbb73ec73abc188e37a43c92fba386d/obstore-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e7e49a5e8ec2adeeb721e1e8162a5bbfa50025cc93370d1ae2c6c234bff18f", size = 3765634, upload-time = "2025-08-07T21:12:39.955Z" }, + { url = "https://files.pythonhosted.org/packages/d1/5a/443d8e790387f5bdee7ac29439c6fbf721a9c590eaf70cbbbbb021eda364/obstore-0.8.0-cp313-cp313-manylinux_2_24_aarch64.whl", hash = "sha256:ec6eaed4b8e1484908e6be2e92690117e942c753dc430d9134eebb63049ab9ec", size = 3530217, upload-time = "2025-08-07T21:12:41.237Z" }, + { url = "https://files.pythonhosted.org/packages/4e/a7/ab60e4644f52c77a76f4583bfa74e1b2b96efb25f94b915e9064de43a1a6/obstore-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed984b040ad6f6e1fa316f0ab4cb95b92d7a301ff13d1afda61cb06b10067ace", size = 3694008, upload-time = "2025-08-07T21:12:42.871Z" }, + { url = "https://files.pythonhosted.org/packages/0b/9a/f64ccec0bebe32c0ee1f007a3f53f83768cd4e590d8de33e942ead063bc4/obstore-0.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4201d51a16fe0f1fbad2bd7bd3e402c59e68fa3dfb4b12eb74e4b3cbfaae285a", size = 3678544, upload-time = "2025-08-07T21:12:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/ee/fd/ea0395df45767153a189a54bf80e0caf0cd57d9842e016f60b79139bb624/obstore-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:15c94a07abdaa8bb5136ab23a0f3b87c7b094166ea008859c1ff00dba2221f8e", size = 3772225, upload-time = "2025-08-07T21:12:45.93Z" }, + { url = "https://files.pythonhosted.org/packages/f1/bc/0f353b3bba595f3ca0991736fe1e5aab39ff11e4d2f5399458dc9e8e443b/obstore-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58afe9b94cc6c8565e3b51518bcccbace5beaba9ac91cc03f3b2d4193ad4bddb", size = 3936694, upload-time = "2025-08-07T21:12:47.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/d2/49a80f8f8c19aaab1953c95ee9d23d135091950940de3221062585d73046/obstore-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:d5b976c0021b8b5807f314ed4766c64e6998fdd533b4da7421d1b6a313c3e3fd", size = 4047458, upload-time = "2025-08-07T21:12:48.484Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ba/a90745593241c89b20d908edec7af9e8788c3526055206ab81bb6159d6bc/obstore-0.8.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:12c216238644da80d225fe8f3edd865aa3f7905efd75bbe6098be9fac3ea5409", size = 3681329, upload-time = "2025-08-07T21:13:07.914Z" }, + { url = "https://files.pythonhosted.org/packages/df/1b/3b14cb825bf1d528a7cbe6601a16fc7e50462ced6f6b50be91279f50e1c0/obstore-0.8.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:e26839a4f0406de4585750461692e956dc2c5e757a060597b695ce3dcfdc82d8", size = 3401194, upload-time = "2025-08-07T21:13:09.185Z" }, + { url = "https://files.pythonhosted.org/packages/7f/57/d3998c408f5744e092e112f7ecae4b66e97d9a52561a4455845f29ae148d/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c4abd3edd59e8bfa317ef88047bde8c20a0784642cb9f9ff4d0b630f778aad5", size = 3454589, upload-time = "2025-08-07T21:13:10.539Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2f/80d3bb97e27263f33a7f739ee7aea60377ecdeec7f40c2498002275f33bd/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e21410498548aa1d66275e8d496feaba995310a6f4c50503d9ce57f388586419", size = 3686695, upload-time = "2025-08-07T21:13:11.792Z" }, + { url = "https://files.pythonhosted.org/packages/88/fb/deda4113ad80903abf6bd6b71360794995e74d98265e637dc3aaf50b14de/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40c60739527d704da6c126859812293279e157c0074de70cb0674cb3d5b84d29", size = 3957601, upload-time = "2025-08-07T21:13:13.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/e8/8dc0bb4d020c3d1f68bf1cf14076290e57dc6a6af38cbe25bff57cb0ed14/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61c8a24222eddeb31bdd2e7f90b6f7f640e91c638ca93c9cac116ce5cd9f4be3", size = 3928263, upload-time = "2025-08-07T21:13:14.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/0c/6b399c2af47ba68b80134867ac779c25dfe42bd30ede2f563ac54e3406ff/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1077de67521fdac4bc9c96ac13ae6a47d08bc75ff5348137bb9b2d472595b222", size = 3769085, upload-time = "2025-08-07T21:13:16.105Z" }, + { url = "https://files.pythonhosted.org/packages/94/2e/985568dcbb9881a0cf33a6bbe197761b4b7b3b10453f296b92bf4b3fd31b/obstore-0.8.0-pp310-pypy310_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:ce6d62c614c08e28dbc5ac6664fde207faac0f36eb77aa3247daff61aed7d31c", size = 3534452, upload-time = "2025-08-07T21:13:17.478Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5d/98549f40f913d46fd42bf1a24d0d4c2dc30117a5b9f76c6dac46e5e59049/obstore-0.8.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7664656839e65369ec29ac4c81d39be3b004141c0e49b90a6cb7bdea774251d2", size = 3698495, upload-time = "2025-08-07T21:13:18.882Z" }, + { url = "https://files.pythonhosted.org/packages/9b/12/eb69bab93b62c7805f45903f3d73138c3b8094f25b830955b77f651cec9b/obstore-0.8.0-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:2b39e9c1f705199c7195bd8de2b90cf51f41cdc085d76df3cf6d0f89d79c1ac9", size = 3675426, upload-time = "2025-08-07T21:13:20.152Z" }, + { url = "https://files.pythonhosted.org/packages/87/7c/498e2a97b7bfe4849a2bb68be5cbeb49bddb55cf190d7a7596c64fa3dbf1/obstore-0.8.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7cf7583133933f5f686bfdc2af1fd9970530383dc67fd9eb859e6846becce1f3", size = 3764518, upload-time = "2025-08-07T21:13:21.491Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/73/552d97cad1b4d7776439670d8f6573dc7948c432be0a3c3782ad55115e3d/obstore-0.8.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:90bae3f473816edc8d11b3e9020e21c50167815686d326198cb31d144fd4dec4", size = 3940359, upload-time = "2025-08-07T21:13:22.826Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = 
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/de/55/16ab6a7d88d93001e1ae4c34cbdcfb376652d761799459ff27c1dc20f6fa/ruff-0.12.11.tar.gz", hash = "sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d", size = 5347103, upload-time = "2025-08-28T13:59:08.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/a2/3b3573e474de39a7a475f3fbaf36a25600bfeb238e1a90392799163b64a0/ruff-0.12.11-py3-none-linux_armv6l.whl", hash = "sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065", size = 11979885, upload-time = "2025-08-28T13:58:26.654Z" }, + { url = "https://files.pythonhosted.org/packages/76/e4/235ad6d1785a2012d3ded2350fd9bc5c5af8c6f56820e696b0118dfe7d24/ruff-0.12.11-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93", size = 12742364, upload-time = "2025-08-28T13:58:30.256Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0d/15b72c5fe6b1e402a543aa9d8960e0a7e19dfb079f5b0b424db48b7febab/ruff-0.12.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd", size = 11920111, upload-time = "2025-08-28T13:58:33.677Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c0/f66339d7893798ad3e17fa5a1e587d6fd9806f7c1c062b63f8b09dda6702/ruff-0.12.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee", size = 12160060, upload-time = "2025-08-28T13:58:35.74Z" }, + { url = "https://files.pythonhosted.org/packages/03/69/9870368326db26f20c946205fb2d0008988aea552dbaec35fbacbb46efaa/ruff-0.12.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0", size = 11799848, upload-time = "2025-08-28T13:58:38.051Z" }, + { url = "https://files.pythonhosted.org/packages/25/8c/dd2c7f990e9b3a8a55eee09d4e675027d31727ce33cdb29eab32d025bdc9/ruff-0.12.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644", size = 13536288, upload-time = "2025-08-28T13:58:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/7a/30/d5496fa09aba59b5e01ea76775a4c8897b13055884f56f1c35a4194c2297/ruff-0.12.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211", size = 14490633, upload-time = "2025-08-28T13:58:42.285Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2f/81f998180ad53445d403c386549d6946d0748e536d58fce5b5e173511183/ruff-0.12.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793", size = 13888430, upload-time = "2025-08-28T13:58:44.641Z" }, + { url = "https://files.pythonhosted.org/packages/87/71/23a0d1d5892a377478c61dbbcffe82a3476b050f38b5162171942a029ef3/ruff-0.12.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee", size = 12913133, upload-time = "2025-08-28T13:58:47.039Z" }, + { url = "https://files.pythonhosted.org/packages/80/22/3c6cef96627f89b344c933781ed38329bfb87737aa438f15da95907cbfd5/ruff-0.12.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8", size = 13169082, upload-time = "2025-08-28T13:58:49.157Z" }, + { url = "https://files.pythonhosted.org/packages/05/b5/68b3ff96160d8b49e8dd10785ff3186be18fd650d356036a3770386e6c7f/ruff-0.12.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f", size = 13139490, upload-time = "2025-08-28T13:58:51.593Z" }, + { url = "https://files.pythonhosted.org/packages/59/b9/050a3278ecd558f74f7ee016fbdf10591d50119df8d5f5da45a22c6afafc/ruff-0.12.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000", size = 11958928, upload-time = "2025-08-28T13:58:53.943Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bc/93be37347db854806904a43b0493af8d6873472dfb4b4b8cbb27786eb651/ruff-0.12.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2", size = 11764513, upload-time = "2025-08-28T13:58:55.976Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a1/1471751e2015a81fd8e166cd311456c11df74c7e8769d4aabfbc7584c7ac/ruff-0.12.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39", size = 12745154, 
upload-time = "2025-08-28T13:58:58.16Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/2542b14890d0f4872dd81b7b2a6aed3ac1786fae1ce9b17e11e6df9e31e3/ruff-0.12.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9", size = 13227653, upload-time = "2025-08-28T13:59:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/2fbfc61047dbfd009c58a28369a693a1484ad15441723be1cd7fe69bb679/ruff-0.12.11-py3-none-win32.whl", hash = "sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3", size = 11944270, upload-time = "2025-08-28T13:59:02.347Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/34276984705bfe069cd383101c45077ee029c3fe3b28225bf67aa35f0647/ruff-0.12.11-py3-none-win_amd64.whl", hash = "sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd", size = 13046600, upload-time = "2025-08-28T13:59:04.751Z" }, + { url = "https://files.pythonhosted.org/packages/84/a8/001d4a7c2b37623a3fd7463208267fb906df40ff31db496157549cfd6e72/ruff-0.12.11-py3-none-win_arm64.whl", hash = "sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea", size = 12135290, upload-time = "2025-08-28T13:59:06.933Z" }, +] + +[[package]] +name = "simple-httpfs" +version = "0.5.0" +source = { editable = "." 
} +dependencies = [ + { name = "diskcache" }, + { name = "fusepy" }, + { name = "obspec" }, + { name = "obstore" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] + +[package.optional-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "diskcache" }, + { name = "fusepy" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.0" }, + { name = "obspec" }, + { name = "obstore" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=6.0" }, + { name = "pytest-cov", marker = "extra == 'dev'" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.12.11" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +provides-extras = ["dev"] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 64fea1c..0000000 --- a/versioneer.py +++ /dev/null @@ -1,1822 +0,0 @@ - -# Version: 0.18 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) - -This is a tool for managing a recorded version number in distutils-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. 
- - -## Quick Install - -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) -* run `versioneer install` in your source tree, commit the results - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. 
- -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. 
- -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. 
However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. 
- -[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. 
The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -""" - -from __future__ import print_function -try: - import configparser -except ImportError: - import ConfigParser as configparser -import errno -import json -import os -import re -import subprocess -import sys - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. 
- me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . - setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) - VCS = parser.get("versioneer", "VCS") # mandatory - - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, 
args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -LONG_VERSION_PY['git'] = ''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = 
stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for i in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%s*" % tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse 
describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: - pass - if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except EnvironmentError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
- """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - 
print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/warner/python-versioneer/issues/52 - - cmds = {} - - # we add "version" to both distutils and setuptools - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? 
- - # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? 
- try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. 
You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -INIT_PY_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - - -def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (EnvironmentError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - 
except EnvironmentError: - old = "" - if INIT_PY_SNIPPET not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1)